Generated by Cython 0.28.4
Yellow lines hint at Python interaction.
Lines that start with a "+" are followed by the C code that Cython generated for them.
Raw output: kmer_ext.cpp
+01: # distutils: language=c++
__pyx_t_2 = __Pyx_PyDict_NewPresized(0); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); if (PyDict_SetItem(__pyx_d, __pyx_n_s_test, __pyx_t_2) < 0) __PYX_ERR(0, 1, __pyx_L1_error) __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
02:
03: from libcpp.string cimport string
04: from libcpp.vector cimport vector
05:
+06: cdef size_t SIZE_OF_INT64 = 8
__pyx_v_8cortexpy_5graph_6parser_8kmer_ext_SIZE_OF_INT64 = 8;
+07: cdef string NUM_TO_LETTER_LIST = b'ACGT'
__pyx_t_1 = __pyx_convert_string_from_py_std__in_string(__pyx_n_b_ACGT); if (unlikely(PyErr_Occurred())) __PYX_ERR(0, 7, __pyx_L1_error) __pyx_v_8cortexpy_5graph_6parser_8kmer_ext_NUM_TO_LETTER_LIST = __pyx_t_1;
08:
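These two module constants fix the decoding conventions used below: k-mers are packed into 8-byte (uint64) words, and each base occupies two bits that index into b'ACGT'. A minimal illustration of that mapping (plain Python, not part of the generated module):

    # 2-bit code -> base, via NUM_TO_LETTER_LIST[code]
    NUM_TO_LETTER_LIST = b'ACGT'
    assert [chr(NUM_TO_LETTER_LIST[code]) for code in range(4)] == ['A', 'C', 'G', 'T']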
+09: def raw_kmer_to_bytes(unsigned kmer_size, const unsigned char[:] kmer_bytes not None):
/* Python wrapper */ static PyObject *__pyx_pw_8cortexpy_5graph_6parser_8kmer_ext_1raw_kmer_to_bytes(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ static PyMethodDef __pyx_mdef_8cortexpy_5graph_6parser_8kmer_ext_1raw_kmer_to_bytes = {"raw_kmer_to_bytes", (PyCFunction)__pyx_pw_8cortexpy_5graph_6parser_8kmer_ext_1raw_kmer_to_bytes, METH_VARARGS|METH_KEYWORDS, 0}; static PyObject *__pyx_pw_8cortexpy_5graph_6parser_8kmer_ext_1raw_kmer_to_bytes(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { unsigned int __pyx_v_kmer_size; __Pyx_memviewslice __pyx_v_kmer_bytes = { 0, 0, { 0 }, { 0 }, { 0 } }; PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("raw_kmer_to_bytes (wrapper)", 0); { static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_kmer_size,&__pyx_n_s_kmer_bytes,0}; PyObject* values[2] = {0,0}; if (unlikely(__pyx_kwds)) { Py_ssize_t kw_args; const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); switch (pos_args) { case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); CYTHON_FALLTHROUGH; case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); CYTHON_FALLTHROUGH; case 0: break; default: goto __pyx_L5_argtuple_error; } kw_args = PyDict_Size(__pyx_kwds); switch (pos_args) { case 0: if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_kmer_size)) != 0)) kw_args--; else goto __pyx_L5_argtuple_error; CYTHON_FALLTHROUGH; case 1: if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_kmer_bytes)) != 0)) kw_args--; else { __Pyx_RaiseArgtupleInvalid("raw_kmer_to_bytes", 1, 2, 2, 1); __PYX_ERR(0, 9, __pyx_L3_error) } } if (unlikely(kw_args > 0)) { if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "raw_kmer_to_bytes") < 0)) __PYX_ERR(0, 9, __pyx_L3_error) } } else if (PyTuple_GET_SIZE(__pyx_args) != 2) { goto __pyx_L5_argtuple_error; } else { values[0] = PyTuple_GET_ITEM(__pyx_args, 0); values[1] = PyTuple_GET_ITEM(__pyx_args, 1); } __pyx_v_kmer_size = __Pyx_PyInt_As_unsigned_int(values[0]); if (unlikely((__pyx_v_kmer_size == (unsigned int)-1) && PyErr_Occurred())) __PYX_ERR(0, 9, __pyx_L3_error) __pyx_v_kmer_bytes = __Pyx_PyObject_to_MemoryviewSlice_ds_unsigned_char__const__(values[1], 0); if (unlikely(!__pyx_v_kmer_bytes.memview)) __PYX_ERR(0, 9, __pyx_L3_error) } goto __pyx_L4_argument_unpacking_done; __pyx_L5_argtuple_error:; __Pyx_RaiseArgtupleInvalid("raw_kmer_to_bytes", 1, 2, 2, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 9, __pyx_L3_error) __pyx_L3_error:; __Pyx_AddTraceback("cortexpy.graph.parser.kmer_ext.raw_kmer_to_bytes", __pyx_clineno, __pyx_lineno, __pyx_filename); __Pyx_RefNannyFinishContext(); return NULL; __pyx_L4_argument_unpacking_done:; if (unlikely(((PyObject *)__pyx_v_kmer_bytes.memview) == Py_None)) { PyErr_Format(PyExc_TypeError, "Argument '%.200s' must not be None", "kmer_bytes"); __PYX_ERR(0, 9, __pyx_L1_error) } __pyx_r = __pyx_pf_8cortexpy_5graph_6parser_8kmer_ext_raw_kmer_to_bytes(__pyx_self, __pyx_v_kmer_size, __pyx_v_kmer_bytes); /* function exit code */ goto __pyx_L0; __pyx_L1_error:; __pyx_r = NULL; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_8cortexpy_5graph_6parser_8kmer_ext_raw_kmer_to_bytes(CYTHON_UNUSED PyObject *__pyx_self, unsigned int __pyx_v_kmer_size, __Pyx_memviewslice __pyx_v_kmer_bytes) { std::vector<char> __pyx_v_letters; std::vector<char> __pyx_v_four_letters; char __pyx_v_kmer_byte; size_t __pyx_v_ulong_idx; size_t __pyx_v_ulong_offset; size_t __pyx_v_byte_offset; size_t 
__pyx_v_pair_idx; PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("raw_kmer_to_bytes", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_10); __Pyx_XDECREF(__pyx_t_11); __Pyx_AddTraceback("cortexpy.graph.parser.kmer_ext.raw_kmer_to_bytes", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __PYX_XDEC_MEMVIEW(&__pyx_v_kmer_bytes, 1); __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; } /* … */ __pyx_tuple__23 = PyTuple_Pack(10, __pyx_n_s_kmer_size, __pyx_n_s_kmer_bytes, __pyx_n_s_letters, __pyx_n_s_four_letters, __pyx_n_s_kmer_byte, __pyx_n_s_ulong_idx, __pyx_n_s_ulong_offset, __pyx_n_s_byte_offset, __pyx_n_s_pair_offset, __pyx_n_s_pair_idx); if (unlikely(!__pyx_tuple__23)) __PYX_ERR(0, 9, __pyx_L1_error) __Pyx_GOTREF(__pyx_tuple__23); __Pyx_GIVEREF(__pyx_tuple__23); /* … */ __pyx_t_2 = PyCFunction_NewEx(&__pyx_mdef_8cortexpy_5graph_6parser_8kmer_ext_1raw_kmer_to_bytes, NULL, __pyx_n_s_cortexpy_graph_parser_kmer_ext); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 9, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); if (PyDict_SetItem(__pyx_d, __pyx_n_s_raw_kmer_to_bytes, __pyx_t_2) < 0) __PYX_ERR(0, 9, __pyx_L1_error) __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __pyx_codeobj__24 = (PyObject*)__Pyx_PyCode_New(2, 0, 10, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__23, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_src_cortexpy_graph_parser_kmer_e, __pyx_n_s_raw_kmer_to_bytes, 9, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__24)) __PYX_ERR(0, 9, __pyx_L1_error)
+10: assert kmer_size > 0
#ifndef CYTHON_WITHOUT_ASSERTIONS
if (unlikely(!Py_OptimizeFlag)) {
  if (unlikely(!((__pyx_v_kmer_size > 0) != 0))) {
    PyErr_SetNone(PyExc_AssertionError);
    __PYX_ERR(0, 10, __pyx_L1_error)
  }
}
#endif
+11: assert kmer_size <= kmer_bytes.shape[0] * 4
#ifndef CYTHON_WITHOUT_ASSERTIONS
if (unlikely(!Py_OptimizeFlag)) {
  if (unlikely(!((__pyx_v_kmer_size <= ((__pyx_v_kmer_bytes.shape[0]) * 4)) != 0))) {
    PyErr_SetNone(PyExc_AssertionError);
    __PYX_ERR(0, 11, __pyx_L1_error)
  }
}
#endif
12: cdef vector[char] letters
13: cdef vector[char] four_letters
+14: four_letters.resize(4,0)
try {
__pyx_v_four_letters.resize(4, 0);
} catch(...) {
__Pyx_CppExn2PyErr();
__PYX_ERR(0, 14, __pyx_L1_error)
}
15: cdef char kmer_byte
16: cdef size_t ulong_idx, ulong_offset, byte_offset, pair_offset, pair_idx
+17: for ulong_idx in range(kmer_bytes.shape[0]//SIZE_OF_INT64):
if (unlikely(__pyx_v_8cortexpy_5graph_6parser_8kmer_ext_SIZE_OF_INT64 == 0)) { PyErr_SetString(PyExc_ZeroDivisionError, "integer division or modulo by zero"); __PYX_ERR(0, 17, __pyx_L1_error) } __pyx_t_1 = ((__pyx_v_kmer_bytes.shape[0]) / __pyx_v_8cortexpy_5graph_6parser_8kmer_ext_SIZE_OF_INT64); __pyx_t_2 = __pyx_t_1; for (__pyx_t_3 = 0; __pyx_t_3 < __pyx_t_2; __pyx_t_3+=1) { __pyx_v_ulong_idx = __pyx_t_3;
+18: ulong_offset = ulong_idx*SIZE_OF_INT64
__pyx_v_ulong_offset = (__pyx_v_ulong_idx * __pyx_v_8cortexpy_5graph_6parser_8kmer_ext_SIZE_OF_INT64);
+19: for byte_offset in reversed(range(SIZE_OF_INT64)):
for (__pyx_t_4 = __pyx_v_8cortexpy_5graph_6parser_8kmer_ext_SIZE_OF_INT64-1 + 1; __pyx_t_4 >= 0 + 1; ) { __pyx_t_4-=1; __pyx_v_byte_offset = __pyx_t_4;
+20: kmer_byte = kmer_bytes[ulong_offset + byte_offset]
__pyx_t_5 = (__pyx_v_ulong_offset + __pyx_v_byte_offset); __pyx_t_6 = -1; if (unlikely(__pyx_t_5 >= (size_t)__pyx_v_kmer_bytes.shape[0])) __pyx_t_6 = 0; if (unlikely(__pyx_t_6 != -1)) { __Pyx_RaiseBufferIndexError(__pyx_t_6); __PYX_ERR(0, 20, __pyx_L1_error) } __pyx_v_kmer_byte = (*((unsigned char const *) ( /* dim=0 */ (__pyx_v_kmer_bytes.data + __pyx_t_5 * __pyx_v_kmer_bytes.strides[0]) )));
+21: for pair_idx in range(4):
for (__pyx_t_7 = 0; __pyx_t_7 < 4; __pyx_t_7+=1) { __pyx_v_pair_idx = __pyx_t_7;
+22: four_letters[3-pair_idx] = NUM_TO_LETTER_LIST[kmer_byte & 0x3]
(__pyx_v_four_letters[(3 - __pyx_v_pair_idx)]) = (__pyx_v_8cortexpy_5graph_6parser_8kmer_ext_NUM_TO_LETTER_LIST[(__pyx_v_kmer_byte & 0x3)]);
+23: kmer_byte >>= 2
__pyx_v_kmer_byte = (__pyx_v_kmer_byte >> 2); }
+24: for kmer_byte in four_letters:
__pyx_t_8 = __pyx_v_four_letters.begin(); for (;;) { if (!(__pyx_t_8 != __pyx_v_four_letters.end())) break; __pyx_t_9 = *__pyx_t_8; ++__pyx_t_8; __pyx_v_kmer_byte = __pyx_t_9; /* … */ } } }
+25: letters.push_back(kmer_byte)
try {
__pyx_v_letters.push_back(__pyx_v_kmer_byte);
} catch(...) {
__Pyx_CppExn2PyErr();
__PYX_ERR(0, 25, __pyx_L1_error)
}
+26: return bytes(letters[(letters.size() - kmer_size):])
__Pyx_XDECREF(__pyx_r); __pyx_t_10 = __pyx_convert_vector_to_py_char(__pyx_v_letters); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 26, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_10); __pyx_t_11 = __Pyx_PyObject_GetSlice(__pyx_t_10, (__pyx_v_letters.size() - __pyx_v_kmer_size), 0, NULL, NULL, NULL, 1, 0, 1); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 26, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_11); __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; __pyx_t_10 = __Pyx_PyObject_CallOneArg(((PyObject *)(&PyBytes_Type)), __pyx_t_11); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 26, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_10); __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; __pyx_r = __pyx_t_10; __pyx_t_10 = 0; goto __pyx_L0;
27:
28: # the above code was reimplemented from this
29: # kmer_as_uint64ts = np.frombuffer(raw_kmer, dtype='<u8')
30: # big_endian_kmer = kmer_as_uint64ts.astype('>u8')
31: # kmer_as_bits = np.unpackbits(np.frombuffer(big_endian_kmer.tobytes(), dtype=np.uint8))
32: # kmer = (kmer_as_bits.reshape(-1, 2) * np.array([2, 1])).sum(1)
33: # return NUM_TO_LETTER[kmer[(len(kmer) - self.kmer_size):]]
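For readers who prefer plain Python to the numpy version in the comment above, here is a sketch of the same decoding logic; the helper name raw_kmer_to_bytes_py is illustrative and the code mirrors the Cython loop structure rather than adding anything new:

    SIZE_OF_INT64 = 8
    NUM_TO_LETTER_LIST = b'ACGT'

    def raw_kmer_to_bytes_py(kmer_size, kmer_bytes):
        """Pure-Python sketch of raw_kmer_to_bytes (illustrative only)."""
        assert kmer_size > 0
        assert kmer_size <= len(kmer_bytes) * 4
        letters = bytearray()
        for ulong_idx in range(len(kmer_bytes) // SIZE_OF_INT64):
            ulong_offset = ulong_idx * SIZE_OF_INT64
            # each 8-byte word is stored little-endian; read its bytes in reverse
            # so the most significant byte comes first
            for byte_offset in reversed(range(SIZE_OF_INT64)):
                kmer_byte = kmer_bytes[ulong_offset + byte_offset]
                four_letters = bytearray(4)
                for pair_idx in range(4):
                    # the low two bits of the byte give the last base of this group of four
                    four_letters[3 - pair_idx] = NUM_TO_LETTER_LIST[kmer_byte & 0x3]
                    kmer_byte >>= 2
                letters.extend(four_letters)
        # the container is padded to whole uint64s, so keep only the last kmer_size bases
        return bytes(letters[len(letters) - kmer_size:])

    # Example: 'ACT' packed into one uint64 (A=0, C=1, T=3 -> 0b000111 = 7, little-endian)
    # raw_kmer_to_bytes_py(3, bytes([7, 0, 0, 0, 0, 0, 0, 0])) == b'ACT'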
34:
+35: def raw_kmer_to_string(int kmer_size, kmer_bytes):
/* Python wrapper */ static PyObject *__pyx_pw_8cortexpy_5graph_6parser_8kmer_ext_3raw_kmer_to_string(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ static PyMethodDef __pyx_mdef_8cortexpy_5graph_6parser_8kmer_ext_3raw_kmer_to_string = {"raw_kmer_to_string", (PyCFunction)__pyx_pw_8cortexpy_5graph_6parser_8kmer_ext_3raw_kmer_to_string, METH_VARARGS|METH_KEYWORDS, 0}; static PyObject *__pyx_pw_8cortexpy_5graph_6parser_8kmer_ext_3raw_kmer_to_string(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { int __pyx_v_kmer_size; PyObject *__pyx_v_kmer_bytes = 0; PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("raw_kmer_to_string (wrapper)", 0); { static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_kmer_size,&__pyx_n_s_kmer_bytes,0}; PyObject* values[2] = {0,0}; if (unlikely(__pyx_kwds)) { Py_ssize_t kw_args; const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); switch (pos_args) { case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); CYTHON_FALLTHROUGH; case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); CYTHON_FALLTHROUGH; case 0: break; default: goto __pyx_L5_argtuple_error; } kw_args = PyDict_Size(__pyx_kwds); switch (pos_args) { case 0: if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_kmer_size)) != 0)) kw_args--; else goto __pyx_L5_argtuple_error; CYTHON_FALLTHROUGH; case 1: if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_kmer_bytes)) != 0)) kw_args--; else { __Pyx_RaiseArgtupleInvalid("raw_kmer_to_string", 1, 2, 2, 1); __PYX_ERR(0, 35, __pyx_L3_error) } } if (unlikely(kw_args > 0)) { if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "raw_kmer_to_string") < 0)) __PYX_ERR(0, 35, __pyx_L3_error) } } else if (PyTuple_GET_SIZE(__pyx_args) != 2) { goto __pyx_L5_argtuple_error; } else { values[0] = PyTuple_GET_ITEM(__pyx_args, 0); values[1] = PyTuple_GET_ITEM(__pyx_args, 1); } __pyx_v_kmer_size = __Pyx_PyInt_As_int(values[0]); if (unlikely((__pyx_v_kmer_size == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 35, __pyx_L3_error) __pyx_v_kmer_bytes = values[1]; } goto __pyx_L4_argument_unpacking_done; __pyx_L5_argtuple_error:; __Pyx_RaiseArgtupleInvalid("raw_kmer_to_string", 1, 2, 2, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 35, __pyx_L3_error) __pyx_L3_error:; __Pyx_AddTraceback("cortexpy.graph.parser.kmer_ext.raw_kmer_to_string", __pyx_clineno, __pyx_lineno, __pyx_filename); __Pyx_RefNannyFinishContext(); return NULL; __pyx_L4_argument_unpacking_done:; __pyx_r = __pyx_pf_8cortexpy_5graph_6parser_8kmer_ext_2raw_kmer_to_string(__pyx_self, __pyx_v_kmer_size, __pyx_v_kmer_bytes); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_8cortexpy_5graph_6parser_8kmer_ext_2raw_kmer_to_string(CYTHON_UNUSED PyObject *__pyx_self, int __pyx_v_kmer_size, PyObject *__pyx_v_kmer_bytes) { PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("raw_kmer_to_string", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_XDECREF(__pyx_t_2); __Pyx_XDECREF(__pyx_t_3); __Pyx_XDECREF(__pyx_t_4); __Pyx_XDECREF(__pyx_t_6); __Pyx_AddTraceback("cortexpy.graph.parser.kmer_ext.raw_kmer_to_string", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; } /* … */ __pyx_tuple__25 = PyTuple_Pack(2, __pyx_n_s_kmer_size, __pyx_n_s_kmer_bytes); if (unlikely(!__pyx_tuple__25)) __PYX_ERR(0, 
35, __pyx_L1_error) __Pyx_GOTREF(__pyx_tuple__25); __Pyx_GIVEREF(__pyx_tuple__25); /* … */ __pyx_t_2 = PyCFunction_NewEx(&__pyx_mdef_8cortexpy_5graph_6parser_8kmer_ext_3raw_kmer_to_string, NULL, __pyx_n_s_cortexpy_graph_parser_kmer_ext); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 35, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); if (PyDict_SetItem(__pyx_d, __pyx_n_s_raw_kmer_to_string, __pyx_t_2) < 0) __PYX_ERR(0, 35, __pyx_L1_error) __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __pyx_codeobj__26 = (PyObject*)__Pyx_PyCode_New(2, 0, 2, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__25, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_src_cortexpy_graph_parser_kmer_e, __pyx_n_s_raw_kmer_to_string, 35, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__26)) __PYX_ERR(0, 35, __pyx_L1_error)
+36: return raw_kmer_to_bytes(kmer_size, kmer_bytes).decode('utf8')
__Pyx_XDECREF(__pyx_r); __pyx_t_2 = __Pyx_GetModuleGlobalName(__pyx_n_s_raw_kmer_to_bytes); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 36, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __pyx_t_3 = __Pyx_PyInt_From_int(__pyx_v_kmer_size); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 36, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __pyx_t_4 = NULL; __pyx_t_5 = 0; if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_2))) { __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_2); if (likely(__pyx_t_4)) { PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); __Pyx_INCREF(__pyx_t_4); __Pyx_INCREF(function); __Pyx_DECREF_SET(__pyx_t_2, function); __pyx_t_5 = 1; } } #if CYTHON_FAST_PYCALL if (PyFunction_Check(__pyx_t_2)) { PyObject *__pyx_temp[3] = {__pyx_t_4, __pyx_t_3, __pyx_v_kmer_bytes}; __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_2, __pyx_temp+1-__pyx_t_5, 2+__pyx_t_5); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 36, __pyx_L1_error) __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; } else #endif #if CYTHON_FAST_PYCCALL if (__Pyx_PyFastCFunction_Check(__pyx_t_2)) { PyObject *__pyx_temp[3] = {__pyx_t_4, __pyx_t_3, __pyx_v_kmer_bytes}; __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_2, __pyx_temp+1-__pyx_t_5, 2+__pyx_t_5); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 36, __pyx_L1_error) __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; } else #endif { __pyx_t_6 = PyTuple_New(2+__pyx_t_5); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 36, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_6); if (__pyx_t_4) { __Pyx_GIVEREF(__pyx_t_4); PyTuple_SET_ITEM(__pyx_t_6, 0, __pyx_t_4); __pyx_t_4 = NULL; } __Pyx_GIVEREF(__pyx_t_3); PyTuple_SET_ITEM(__pyx_t_6, 0+__pyx_t_5, __pyx_t_3); __Pyx_INCREF(__pyx_v_kmer_bytes); __Pyx_GIVEREF(__pyx_v_kmer_bytes); PyTuple_SET_ITEM(__pyx_t_6, 1+__pyx_t_5, __pyx_v_kmer_bytes); __pyx_t_3 = 0; __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_2, __pyx_t_6, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 36, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; } __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_decode); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 36, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_2, __pyx_tuple_, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 36, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __pyx_r = __pyx_t_1; __pyx_t_1 = 0; goto __pyx_L0; /* … */ __pyx_tuple_ = PyTuple_Pack(1, __pyx_n_s_utf8); if (unlikely(!__pyx_tuple_)) __PYX_ERR(0, 36, __pyx_L1_error) __Pyx_GOTREF(__pyx_tuple_); __Pyx_GIVEREF(__pyx_tuple_);
37:
+38: def raw_kmer_to_list(int kmer_size, kmer_bytes):
/* Python wrapper */ static PyObject *__pyx_pw_8cortexpy_5graph_6parser_8kmer_ext_5raw_kmer_to_list(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ static PyMethodDef __pyx_mdef_8cortexpy_5graph_6parser_8kmer_ext_5raw_kmer_to_list = {"raw_kmer_to_list", (PyCFunction)__pyx_pw_8cortexpy_5graph_6parser_8kmer_ext_5raw_kmer_to_list, METH_VARARGS|METH_KEYWORDS, 0}; static PyObject *__pyx_pw_8cortexpy_5graph_6parser_8kmer_ext_5raw_kmer_to_list(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { int __pyx_v_kmer_size; PyObject *__pyx_v_kmer_bytes = 0; PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("raw_kmer_to_list (wrapper)", 0); { static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_kmer_size,&__pyx_n_s_kmer_bytes,0}; PyObject* values[2] = {0,0}; if (unlikely(__pyx_kwds)) { Py_ssize_t kw_args; const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); switch (pos_args) { case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); CYTHON_FALLTHROUGH; case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); CYTHON_FALLTHROUGH; case 0: break; default: goto __pyx_L5_argtuple_error; } kw_args = PyDict_Size(__pyx_kwds); switch (pos_args) { case 0: if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_kmer_size)) != 0)) kw_args--; else goto __pyx_L5_argtuple_error; CYTHON_FALLTHROUGH; case 1: if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_kmer_bytes)) != 0)) kw_args--; else { __Pyx_RaiseArgtupleInvalid("raw_kmer_to_list", 1, 2, 2, 1); __PYX_ERR(0, 38, __pyx_L3_error) } } if (unlikely(kw_args > 0)) { if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "raw_kmer_to_list") < 0)) __PYX_ERR(0, 38, __pyx_L3_error) } } else if (PyTuple_GET_SIZE(__pyx_args) != 2) { goto __pyx_L5_argtuple_error; } else { values[0] = PyTuple_GET_ITEM(__pyx_args, 0); values[1] = PyTuple_GET_ITEM(__pyx_args, 1); } __pyx_v_kmer_size = __Pyx_PyInt_As_int(values[0]); if (unlikely((__pyx_v_kmer_size == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 38, __pyx_L3_error) __pyx_v_kmer_bytes = values[1]; } goto __pyx_L4_argument_unpacking_done; __pyx_L5_argtuple_error:; __Pyx_RaiseArgtupleInvalid("raw_kmer_to_list", 1, 2, 2, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 38, __pyx_L3_error) __pyx_L3_error:; __Pyx_AddTraceback("cortexpy.graph.parser.kmer_ext.raw_kmer_to_list", __pyx_clineno, __pyx_lineno, __pyx_filename); __Pyx_RefNannyFinishContext(); return NULL; __pyx_L4_argument_unpacking_done:; __pyx_r = __pyx_pf_8cortexpy_5graph_6parser_8kmer_ext_4raw_kmer_to_list(__pyx_self, __pyx_v_kmer_size, __pyx_v_kmer_bytes); /* function exit code */ __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_8cortexpy_5graph_6parser_8kmer_ext_4raw_kmer_to_list(CYTHON_UNUSED PyObject *__pyx_self, int __pyx_v_kmer_size, PyObject *__pyx_v_kmer_bytes) { PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("raw_kmer_to_list", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_XDECREF(__pyx_t_2); __Pyx_XDECREF(__pyx_t_3); __Pyx_XDECREF(__pyx_t_4); __Pyx_XDECREF(__pyx_t_6); __Pyx_AddTraceback("cortexpy.graph.parser.kmer_ext.raw_kmer_to_list", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; } /* … */ __pyx_tuple__27 = PyTuple_Pack(2, __pyx_n_s_kmer_size, __pyx_n_s_kmer_bytes); if (unlikely(!__pyx_tuple__27)) __PYX_ERR(0, 38, __pyx_L1_error) 
__Pyx_GOTREF(__pyx_tuple__27); __Pyx_GIVEREF(__pyx_tuple__27); /* … */ __pyx_t_2 = PyCFunction_NewEx(&__pyx_mdef_8cortexpy_5graph_6parser_8kmer_ext_5raw_kmer_to_list, NULL, __pyx_n_s_cortexpy_graph_parser_kmer_ext); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 38, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); if (PyDict_SetItem(__pyx_d, __pyx_n_s_raw_kmer_to_list, __pyx_t_2) < 0) __PYX_ERR(0, 38, __pyx_L1_error) __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __pyx_codeobj__28 = (PyObject*)__Pyx_PyCode_New(2, 0, 2, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__27, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_src_cortexpy_graph_parser_kmer_e, __pyx_n_s_raw_kmer_to_list, 38, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__28)) __PYX_ERR(0, 38, __pyx_L1_error)
+39: return list(raw_kmer_to_string(kmer_size, kmer_bytes))
__Pyx_XDECREF(__pyx_r); __pyx_t_2 = __Pyx_GetModuleGlobalName(__pyx_n_s_raw_kmer_to_string); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 39, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __pyx_t_3 = __Pyx_PyInt_From_int(__pyx_v_kmer_size); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 39, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_3); __pyx_t_4 = NULL; __pyx_t_5 = 0; if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_2))) { __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_2); if (likely(__pyx_t_4)) { PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); __Pyx_INCREF(__pyx_t_4); __Pyx_INCREF(function); __Pyx_DECREF_SET(__pyx_t_2, function); __pyx_t_5 = 1; } } #if CYTHON_FAST_PYCALL if (PyFunction_Check(__pyx_t_2)) { PyObject *__pyx_temp[3] = {__pyx_t_4, __pyx_t_3, __pyx_v_kmer_bytes}; __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_2, __pyx_temp+1-__pyx_t_5, 2+__pyx_t_5); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 39, __pyx_L1_error) __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; } else #endif #if CYTHON_FAST_PYCCALL if (__Pyx_PyFastCFunction_Check(__pyx_t_2)) { PyObject *__pyx_temp[3] = {__pyx_t_4, __pyx_t_3, __pyx_v_kmer_bytes}; __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_2, __pyx_temp+1-__pyx_t_5, 2+__pyx_t_5); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 39, __pyx_L1_error) __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; } else #endif { __pyx_t_6 = PyTuple_New(2+__pyx_t_5); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 39, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_6); if (__pyx_t_4) { __Pyx_GIVEREF(__pyx_t_4); PyTuple_SET_ITEM(__pyx_t_6, 0, __pyx_t_4); __pyx_t_4 = NULL; } __Pyx_GIVEREF(__pyx_t_3); PyTuple_SET_ITEM(__pyx_t_6, 0+__pyx_t_5, __pyx_t_3); __Pyx_INCREF(__pyx_v_kmer_bytes); __Pyx_GIVEREF(__pyx_v_kmer_bytes); PyTuple_SET_ITEM(__pyx_t_6, 1+__pyx_t_5, __pyx_v_kmer_bytes); __pyx_t_3 = 0; __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_2, __pyx_t_6, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 39, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; } __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __pyx_t_2 = PySequence_List(__pyx_t_1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 39, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __pyx_r = __pyx_t_2; __pyx_t_2 = 0; goto __pyx_L0;
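The two wrappers above only re-express the bytes result, so, continuing the hypothetical single-word buffer from the earlier sketch, usage would look like this:

    raw = bytes([7, 0, 0, 0, 0, 0, 0, 0])   # hypothetical one-uint64 k-mer container
    raw_kmer_to_bytes(3, raw)    # -> b'ACT'
    raw_kmer_to_string(3, raw)   # -> 'ACT'
    raw_kmer_to_list(3, raw)     # -> ['A', 'C', 'T']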
40:
+41: def raw_edges_to_list(const unsigned char[:] edge_bytes not None):
/* Python wrapper */ static PyObject *__pyx_pw_8cortexpy_5graph_6parser_8kmer_ext_7raw_edges_to_list(PyObject *__pyx_self, PyObject *__pyx_arg_edge_bytes); /*proto*/ static PyMethodDef __pyx_mdef_8cortexpy_5graph_6parser_8kmer_ext_7raw_edges_to_list = {"raw_edges_to_list", (PyCFunction)__pyx_pw_8cortexpy_5graph_6parser_8kmer_ext_7raw_edges_to_list, METH_O, 0}; static PyObject *__pyx_pw_8cortexpy_5graph_6parser_8kmer_ext_7raw_edges_to_list(PyObject *__pyx_self, PyObject *__pyx_arg_edge_bytes) { __Pyx_memviewslice __pyx_v_edge_bytes = { 0, 0, { 0 }, { 0 }, { 0 } }; PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("raw_edges_to_list (wrapper)", 0); assert(__pyx_arg_edge_bytes); { __pyx_v_edge_bytes = __Pyx_PyObject_to_MemoryviewSlice_ds_unsigned_char__const__(__pyx_arg_edge_bytes, 0); if (unlikely(!__pyx_v_edge_bytes.memview)) __PYX_ERR(0, 41, __pyx_L3_error) } goto __pyx_L4_argument_unpacking_done; __pyx_L3_error:; __Pyx_AddTraceback("cortexpy.graph.parser.kmer_ext.raw_edges_to_list", __pyx_clineno, __pyx_lineno, __pyx_filename); __Pyx_RefNannyFinishContext(); return NULL; __pyx_L4_argument_unpacking_done:; if (unlikely(((PyObject *)__pyx_v_edge_bytes.memview) == Py_None)) { PyErr_Format(PyExc_TypeError, "Argument '%.200s' must not be None", "edge_bytes"); __PYX_ERR(0, 41, __pyx_L1_error) } __pyx_r = __pyx_pf_8cortexpy_5graph_6parser_8kmer_ext_6raw_edges_to_list(__pyx_self, __pyx_v_edge_bytes); /* function exit code */ goto __pyx_L0; __pyx_L1_error:; __pyx_r = NULL; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_8cortexpy_5graph_6parser_8kmer_ext_6raw_edges_to_list(CYTHON_UNUSED PyObject *__pyx_self, __Pyx_memviewslice __pyx_v_edge_bytes) { char __pyx_v_e_byte; int __pyx_v_i; int __pyx_v_e_byte_idx; std::vector<int> __pyx_v_edge_set; PyObject *__pyx_v_tuples = NULL; PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("raw_edges_to_list", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_1); __Pyx_XDECREF(__pyx_t_7); __Pyx_AddTraceback("cortexpy.graph.parser.kmer_ext.raw_edges_to_list", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __PYX_XDEC_MEMVIEW(&__pyx_v_edge_bytes, 1); __Pyx_XDECREF(__pyx_v_tuples); __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; } /* … */ __pyx_tuple__29 = PyTuple_Pack(7, __pyx_n_s_edge_bytes, __pyx_n_s_edge_bytes, __pyx_n_s_e_byte, __pyx_n_s_i, __pyx_n_s_e_byte_idx, __pyx_n_s_edge_set, __pyx_n_s_tuples); if (unlikely(!__pyx_tuple__29)) __PYX_ERR(0, 41, __pyx_L1_error) __Pyx_GOTREF(__pyx_tuple__29); __Pyx_GIVEREF(__pyx_tuple__29); /* … */ __pyx_t_2 = PyCFunction_NewEx(&__pyx_mdef_8cortexpy_5graph_6parser_8kmer_ext_7raw_edges_to_list, NULL, __pyx_n_s_cortexpy_graph_parser_kmer_ext); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 41, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); if (PyDict_SetItem(__pyx_d, __pyx_n_s_raw_edges_to_list, __pyx_t_2) < 0) __PYX_ERR(0, 41, __pyx_L1_error) __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __pyx_codeobj__30 = (PyObject*)__Pyx_PyCode_New(1, 0, 7, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__29, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_src_cortexpy_graph_parser_kmer_e, __pyx_n_s_raw_edges_to_list, 41, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__30)) __PYX_ERR(0, 41, __pyx_L1_error)
42: cdef char e_byte
43: cdef int i, e_byte_idx
44: cdef vector[int] edge_set
+45: edge_set.resize(8, 0)
try {
__pyx_v_edge_set.resize(8, 0);
} catch(...) {
__Pyx_CppExn2PyErr();
__PYX_ERR(0, 45, __pyx_L1_error)
}
+46: tuples = []
__pyx_t_1 = PyList_New(0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 46, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_v_tuples = ((PyObject*)__pyx_t_1); __pyx_t_1 = 0;
+47: for e_byte_idx in range(edge_bytes.shape[0]):
__pyx_t_2 = (__pyx_v_edge_bytes.shape[0]); __pyx_t_3 = __pyx_t_2; for (__pyx_t_4 = 0; __pyx_t_4 < __pyx_t_3; __pyx_t_4+=1) { __pyx_v_e_byte_idx = __pyx_t_4;
+48: e_byte = edge_bytes[e_byte_idx]
__pyx_t_5 = __pyx_v_e_byte_idx; __pyx_t_6 = -1; if (__pyx_t_5 < 0) { __pyx_t_5 += __pyx_v_edge_bytes.shape[0]; if (unlikely(__pyx_t_5 < 0)) __pyx_t_6 = 0; } else if (unlikely(__pyx_t_5 >= __pyx_v_edge_bytes.shape[0])) __pyx_t_6 = 0; if (unlikely(__pyx_t_6 != -1)) { __Pyx_RaiseBufferIndexError(__pyx_t_6); __PYX_ERR(0, 48, __pyx_L1_error) } __pyx_v_e_byte = (*((unsigned char const *) ( /* dim=0 */ (__pyx_v_edge_bytes.data + __pyx_t_5 * __pyx_v_edge_bytes.strides[0]) )));
+49: for i in range(8):
for (__pyx_t_6 = 0; __pyx_t_6 < 8; __pyx_t_6+=1) { __pyx_v_i = __pyx_t_6;
+50: edge_set[7-i] = e_byte & 0x1
(__pyx_v_edge_set[(7 - __pyx_v_i)]) = (__pyx_v_e_byte & 0x1);
+51: e_byte >>= 1
__pyx_v_e_byte = (__pyx_v_e_byte >> 1); }
+52: tuples.append(tuple(edge_set))
__pyx_t_1 = __pyx_convert_vector_to_py_int(__pyx_v_edge_set); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 52, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_1); __pyx_t_7 = __Pyx_PySequence_Tuple(__pyx_t_1); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 52, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_7); __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; __pyx_t_8 = __Pyx_PyList_Append(__pyx_v_tuples, __pyx_t_7); if (unlikely(__pyx_t_8 == ((int)-1))) __PYX_ERR(0, 52, __pyx_L1_error) __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; }
+53: return tuples
__Pyx_XDECREF(__pyx_r); __Pyx_INCREF(__pyx_v_tuples); __pyx_r = __pyx_v_tuples; goto __pyx_L0;
54:
55: # edge_bytes = np.frombuffer(self._data[start:], dtype=np.uint8)
56: # edge_sets = np.unpackbits(edge_bytes)
57: # edge_sets = edge_sets.reshape(-1, 8)
58: # edge_sets = [EdgeSet(tuple(edge_set.tolist())) for edge_set in edge_sets]
59: # self._edges = edge_sets
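As with the k-mer decoder, the edge unpacking can be sketched in plain Python (the helper name raw_edges_to_list_py is illustrative, not part of the module); each byte yields one 8-tuple of 0/1 flags, most significant bit first, matching the np.unpackbits reference in the comment:

    def raw_edges_to_list_py(edge_bytes):
        """Pure-Python sketch of raw_edges_to_list (illustrative only)."""
        tuples = []
        for e_byte in edge_bytes:
            edge_set = [0] * 8
            for i in range(8):
                # peel bits off the low end, filling the tuple from the right
                edge_set[7 - i] = e_byte & 0x1
                e_byte >>= 1
            tuples.append(tuple(edge_set))
        return tuples

    # Example: raw_edges_to_list_py(bytes([0b10000001])) == [(1, 0, 0, 0, 0, 0, 0, 1)]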
60:
+61: def raw_to_coverage(const unsigned char[:] buffer not None, size_t offset, size_t num_colors):
/* Python wrapper */ static PyObject *__pyx_pw_8cortexpy_5graph_6parser_8kmer_ext_9raw_to_coverage(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ static PyMethodDef __pyx_mdef_8cortexpy_5graph_6parser_8kmer_ext_9raw_to_coverage = {"raw_to_coverage", (PyCFunction)__pyx_pw_8cortexpy_5graph_6parser_8kmer_ext_9raw_to_coverage, METH_VARARGS|METH_KEYWORDS, 0}; static PyObject *__pyx_pw_8cortexpy_5graph_6parser_8kmer_ext_9raw_to_coverage(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { __Pyx_memviewslice __pyx_v_buffer = { 0, 0, { 0 }, { 0 }, { 0 } }; size_t __pyx_v_offset; size_t __pyx_v_num_colors; PyObject *__pyx_r = 0; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("raw_to_coverage (wrapper)", 0); { static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_buffer,&__pyx_n_s_offset,&__pyx_n_s_num_colors,0}; PyObject* values[3] = {0,0,0}; if (unlikely(__pyx_kwds)) { Py_ssize_t kw_args; const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); switch (pos_args) { case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); CYTHON_FALLTHROUGH; case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); CYTHON_FALLTHROUGH; case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); CYTHON_FALLTHROUGH; case 0: break; default: goto __pyx_L5_argtuple_error; } kw_args = PyDict_Size(__pyx_kwds); switch (pos_args) { case 0: if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_buffer)) != 0)) kw_args--; else goto __pyx_L5_argtuple_error; CYTHON_FALLTHROUGH; case 1: if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_offset)) != 0)) kw_args--; else { __Pyx_RaiseArgtupleInvalid("raw_to_coverage", 1, 3, 3, 1); __PYX_ERR(0, 61, __pyx_L3_error) } CYTHON_FALLTHROUGH; case 2: if (likely((values[2] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_num_colors)) != 0)) kw_args--; else { __Pyx_RaiseArgtupleInvalid("raw_to_coverage", 1, 3, 3, 2); __PYX_ERR(0, 61, __pyx_L3_error) } } if (unlikely(kw_args > 0)) { if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "raw_to_coverage") < 0)) __PYX_ERR(0, 61, __pyx_L3_error) } } else if (PyTuple_GET_SIZE(__pyx_args) != 3) { goto __pyx_L5_argtuple_error; } else { values[0] = PyTuple_GET_ITEM(__pyx_args, 0); values[1] = PyTuple_GET_ITEM(__pyx_args, 1); values[2] = PyTuple_GET_ITEM(__pyx_args, 2); } __pyx_v_buffer = __Pyx_PyObject_to_MemoryviewSlice_ds_unsigned_char__const__(values[0], 0); if (unlikely(!__pyx_v_buffer.memview)) __PYX_ERR(0, 61, __pyx_L3_error) __pyx_v_offset = __Pyx_PyInt_As_size_t(values[1]); if (unlikely((__pyx_v_offset == (size_t)-1) && PyErr_Occurred())) __PYX_ERR(0, 61, __pyx_L3_error) __pyx_v_num_colors = __Pyx_PyInt_As_size_t(values[2]); if (unlikely((__pyx_v_num_colors == (size_t)-1) && PyErr_Occurred())) __PYX_ERR(0, 61, __pyx_L3_error) } goto __pyx_L4_argument_unpacking_done; __pyx_L5_argtuple_error:; __Pyx_RaiseArgtupleInvalid("raw_to_coverage", 1, 3, 3, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 61, __pyx_L3_error) __pyx_L3_error:; __Pyx_AddTraceback("cortexpy.graph.parser.kmer_ext.raw_to_coverage", __pyx_clineno, __pyx_lineno, __pyx_filename); __Pyx_RefNannyFinishContext(); return NULL; __pyx_L4_argument_unpacking_done:; if (unlikely(((PyObject *)__pyx_v_buffer.memview) == Py_None)) { PyErr_Format(PyExc_TypeError, "Argument '%.200s' must not be None", "buffer"); __PYX_ERR(0, 61, __pyx_L1_error) } __pyx_r = __pyx_pf_8cortexpy_5graph_6parser_8kmer_ext_8raw_to_coverage(__pyx_self, __pyx_v_buffer, __pyx_v_offset, __pyx_v_num_colors); /* function exit 
code */ goto __pyx_L0; __pyx_L1_error:; __pyx_r = NULL; __pyx_L0:; __Pyx_RefNannyFinishContext(); return __pyx_r; } static PyObject *__pyx_pf_8cortexpy_5graph_6parser_8kmer_ext_8raw_to_coverage(CYTHON_UNUSED PyObject *__pyx_self, __Pyx_memviewslice __pyx_v_buffer, size_t __pyx_v_offset, size_t __pyx_v_num_colors) { unsigned int __pyx_v_coverage; std::vector<unsigned int> __pyx_v_coverages; CYTHON_UNUSED size_t __pyx_v_color; PyObject *__pyx_r = NULL; __Pyx_RefNannyDeclarations __Pyx_RefNannySetupContext("raw_to_coverage", 0); /* … */ /* function exit code */ __pyx_L1_error:; __Pyx_XDECREF(__pyx_t_9); __Pyx_XDECREF(__pyx_t_10); __Pyx_AddTraceback("cortexpy.graph.parser.kmer_ext.raw_to_coverage", __pyx_clineno, __pyx_lineno, __pyx_filename); __pyx_r = NULL; __pyx_L0:; __PYX_XDEC_MEMVIEW(&__pyx_v_buffer, 1); __Pyx_XGIVEREF(__pyx_r); __Pyx_RefNannyFinishContext(); return __pyx_r; } /* … */ __pyx_tuple__31 = PyTuple_Pack(6, __pyx_n_s_buffer, __pyx_n_s_offset, __pyx_n_s_num_colors, __pyx_n_s_coverage, __pyx_n_s_coverages, __pyx_n_s_color); if (unlikely(!__pyx_tuple__31)) __PYX_ERR(0, 61, __pyx_L1_error) __Pyx_GOTREF(__pyx_tuple__31); __Pyx_GIVEREF(__pyx_tuple__31); /* … */ __pyx_t_2 = PyCFunction_NewEx(&__pyx_mdef_8cortexpy_5graph_6parser_8kmer_ext_9raw_to_coverage, NULL, __pyx_n_s_cortexpy_graph_parser_kmer_ext); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 61, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_2); if (PyDict_SetItem(__pyx_d, __pyx_n_s_raw_to_coverage, __pyx_t_2) < 0) __PYX_ERR(0, 61, __pyx_L1_error) __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; __pyx_codeobj__32 = (PyObject*)__Pyx_PyCode_New(3, 0, 6, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__31, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_src_cortexpy_graph_parser_kmer_e, __pyx_n_s_raw_to_coverage, 61, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__32)) __PYX_ERR(0, 61, __pyx_L1_error)
62: cdef unsigned coverage
63: cdef vector[unsigned] coverages
+64: coverages.reserve(num_colors)
__pyx_v_coverages.reserve(__pyx_v_num_colors);
+65: for color in range(num_colors):
__pyx_t_1 = __pyx_v_num_colors; __pyx_t_2 = __pyx_t_1; for (__pyx_t_3 = 0; __pyx_t_3 < __pyx_t_2; __pyx_t_3+=1) { __pyx_v_color = __pyx_t_3;
+66: coverage = (buffer[offset]<<0) | (buffer[offset+1]<<8) | (buffer[offset+2]<<16) | (buffer[offset+3]<<24)
__pyx_t_4 = __pyx_v_offset; __pyx_t_5 = -1; if (unlikely(__pyx_t_4 >= (size_t)__pyx_v_buffer.shape[0])) __pyx_t_5 = 0; if (unlikely(__pyx_t_5 != -1)) { __Pyx_RaiseBufferIndexError(__pyx_t_5); __PYX_ERR(0, 66, __pyx_L1_error) } __pyx_t_6 = (__pyx_v_offset + 1); __pyx_t_5 = -1; if (unlikely(__pyx_t_6 >= (size_t)__pyx_v_buffer.shape[0])) __pyx_t_5 = 0; if (unlikely(__pyx_t_5 != -1)) { __Pyx_RaiseBufferIndexError(__pyx_t_5); __PYX_ERR(0, 66, __pyx_L1_error) } __pyx_t_7 = (__pyx_v_offset + 2); __pyx_t_5 = -1; if (unlikely(__pyx_t_7 >= (size_t)__pyx_v_buffer.shape[0])) __pyx_t_5 = 0; if (unlikely(__pyx_t_5 != -1)) { __Pyx_RaiseBufferIndexError(__pyx_t_5); __PYX_ERR(0, 66, __pyx_L1_error) } __pyx_t_8 = (__pyx_v_offset + 3); __pyx_t_5 = -1; if (unlikely(__pyx_t_8 >= (size_t)__pyx_v_buffer.shape[0])) __pyx_t_5 = 0; if (unlikely(__pyx_t_5 != -1)) { __Pyx_RaiseBufferIndexError(__pyx_t_5); __PYX_ERR(0, 66, __pyx_L1_error) } __pyx_v_coverage = (((((*((unsigned char const *) ( /* dim=0 */ (__pyx_v_buffer.data + __pyx_t_4 * __pyx_v_buffer.strides[0]) ))) << 0) | ((*((unsigned char const *) ( /* dim=0 */ (__pyx_v_buffer.data + __pyx_t_6 * __pyx_v_buffer.strides[0]) ))) << 8)) | ((*((unsigned char const *) ( /* dim=0 */ (__pyx_v_buffer.data + __pyx_t_7 * __pyx_v_buffer.strides[0]) ))) << 16)) | ((*((unsigned char const *) ( /* dim=0 */ (__pyx_v_buffer.data + __pyx_t_8 * __pyx_v_buffer.strides[0]) ))) << 24));
+67: coverages.push_back(coverage)
try {
__pyx_v_coverages.push_back(__pyx_v_coverage);
} catch(...) {
__Pyx_CppExn2PyErr();
__PYX_ERR(0, 67, __pyx_L1_error)
}
+68: offset += 4
__pyx_v_offset = (__pyx_v_offset + 4); }
+69: return tuple(coverages)
__Pyx_XDECREF(__pyx_r); __pyx_t_9 = __pyx_convert_vector_to_py_unsigned_int(__pyx_v_coverages); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 69, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_9); __pyx_t_10 = __Pyx_PySequence_Tuple(__pyx_t_9); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 69, __pyx_L1_error) __Pyx_GOTREF(__pyx_t_10); __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; __pyx_r = __pyx_t_10; __pyx_t_10 = 0; goto __pyx_L0;
70:
71: # originally:
72: # start = self.kmer_container_size_in_uint64ts * UINT64_T
73: # coverage_raw = self._data[start:(start + self.num_colors * UINT32_T)]
74: # fmt_string = ''.join(['I' for _ in range(self.num_colors)])
75: # self._coverage = unpack(fmt_string, coverage_raw)
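Finally, the coverage decoding is a plain little-endian uint32 read per color, as the byte shifts on line 66 imply. A pure-Python sketch (illustrative name, in the spirit of the struct.unpack reference in the comment above):

    def raw_to_coverage_py(buffer, offset, num_colors):
        """Pure-Python sketch of raw_to_coverage (illustrative only)."""
        coverages = []
        for _ in range(num_colors):
            # four bytes per color, least significant byte first
            coverages.append(int.from_bytes(buffer[offset:offset + 4], 'little'))
            offset += 4
        return tuple(coverages)

    # Example: raw_to_coverage_py(bytes([1, 0, 0, 0, 5, 0, 0, 0]), 0, 2) == (1, 5)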