LCOV - code coverage report
Current view: top level - Python/clinic - Python-tokenize.c.h (source / functions) Hit Total Coverage
Test: CPython 3.12 LCOV report [commit acb105a7c1f] Lines: 12 18 66.7 %
Date: 2022-07-20 13:12:14 Functions: 1 1 100.0 %
Branches: 8 16 50.0 %

           Branch data     Line data    Source code
       1                 :            : /*[clinic input]
       2                 :            : preserve
       3                 :            : [clinic start generated code]*/
       4                 :            : 
                         :            : /* NOTE(review): Argument Clinic generated code ("preserve" directive above,
                         :            :  * checksum marker below). Do not hand-edit; change the [clinic input] block
                         :            :  * in the owning .c file and regenerate instead. */
                         :            : 
                         :            : /* Real implementation, defined in the owning .c file; receives the already
                         :            :  * validated, NUL-free UTF-8 `source` buffer. */
       5                 :            : static PyObject *
       6                 :            : tokenizeriter_new_impl(PyTypeObject *type, const char *source);
       7                 :            : 
                         :            : /* tp_new wrapper for `tokenizeriter(source: str)`.
                         :            :  * Parses exactly one required positional-or-keyword argument ("source"),
                         :            :  * enforces it is a str with no embedded NUL, converts it to UTF-8, and
                         :            :  * delegates to tokenizeriter_new_impl(). Returns a new reference, or NULL
                         :            :  * with an exception set on any failure. */
       8                 :            : static PyObject *
       9                 :        114 : tokenizeriter_new(PyTypeObject *type, PyObject *args, PyObject *kwargs)
      10                 :            : {
      11                 :        114 :     PyObject *return_value = NULL;
      12                 :            :     static const char * const _keywords[] = {"source", NULL};
      13                 :            :     static _PyArg_Parser _parser = {NULL, _keywords, "tokenizeriter", 0};
      14                 :            :     PyObject *argsbuf[1];
      15                 :            :     PyObject * const *fastargs;
      16                 :        114 :     Py_ssize_t nargs = PyTuple_GET_SIZE(args);
      17                 :            :     const char *source;
      18                 :            : 
                         :            :     /* Unpack into argsbuf: min 1 positional, max 1 positional, 0 kw-only.
                         :            :      * fastargs entries are borrowed references; nothing to decref on exit. */
      19   [ +  -  +  -  :        114 :     fastargs = _PyArg_UnpackKeywords(_PyTuple_CAST(args)->ob_item, nargs, kwargs, NULL, &_parser, 1, 1, 0, argsbuf);
              +  -  +  - ]
      20         [ -  + ]:        114 :     if (!fastargs) {
      21                 :          0 :         goto exit;
      22                 :            :     }
                         :            :     /* Clinic "str" converter: reject non-str before UTF-8 conversion. */
      23         [ -  + ]:        114 :     if (!PyUnicode_Check(fastargs[0])) {
      24                 :          0 :         _PyArg_BadArgument("tokenizeriter", "argument 'source'", "str", fastargs[0]);
      25                 :          0 :         goto exit;
      26                 :            :     }
      27                 :            :     Py_ssize_t source_length;
                         :            :     /* Buffer is cached on and owned by the unicode object; valid as long as
                         :            :      * fastargs[0] is alive, so no copy is needed for the impl call below. */
      28                 :        114 :     source = PyUnicode_AsUTF8AndSize(fastargs[0], &source_length);
      29         [ -  + ]:        114 :     if (source == NULL) {
      30                 :          0 :         goto exit;
      31                 :            :     }
                         :            :     /* strlen() stops at the first NUL; a mismatch with the true length means
                         :            :      * the string contains an embedded '\0', which the C-string impl cannot
                         :            :      * represent -- reject it explicitly. */
      32         [ -  + ]:        114 :     if (strlen(source) != (size_t)source_length) {
      33                 :          0 :         PyErr_SetString(PyExc_ValueError, "embedded null character");
      34                 :          0 :         goto exit;
      35                 :            :     }
      36                 :        114 :     return_value = tokenizeriter_new_impl(type, source);
      37                 :            : 
      38                 :        114 : exit:
      39                 :        114 :     return return_value;
      40                 :            : }
      41                 :            : /*[clinic end generated code: output=dfcd64774e01bfe6 input=a9049054013a1b77]*/

Generated by: LCOV version 1.14