def DeclarationToParts(self, parts, needs_name_removed)

in ext/gtest-1.8.0/googlemock/scripts/generator/cpp/ast.py [0:0]
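
Splits the token list for one declaration into a (name, type_name,
templated_types, modifiers, default, other_tokens) tuple. When
needs_name_removed is true, the declared name and any default value
are first stripped from the tokens.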


    def DeclarationToParts(self, parts, needs_name_removed):
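        """Separates one declaration's tokens into its constituent parts.

        Args:
          parts: list of Token objects making up the declaration.
          needs_name_removed: bool, True when the tokens still include the
            declared name (and possibly a default value) to be stripped.

        Returns:
          Tuple of (name, type_name, templated_types, modifiers, default,
          other_tokens).
        """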
        name = None
        default = []
        if needs_name_removed:
            # Handle default (initial) values properly.
            for i, t in enumerate(parts):
                if t.name == '=':
                    default = parts[i+1:]
                    name = parts[i-1].name
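                    # An empty array declarator, e.g. "int foo[] = ...",
                    # leaves ']' where the name should be; the real name
                    # sits just before the brackets.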
                    if name == ']' and parts[i-2].name == '[':
                        name = parts[i-3].name
                        i -= 1
                    parts = parts[:i-1]
                    break
            else:
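                # No '=' found: there is no default value, and the trailing
                # NAME token (if any) is the declared name.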
                if parts[-1].token_type == tokenize.NAME:
                    name = parts.pop().name
                else:
                    # TODO(nnorwitz): this is a hack for code like
                    # Register(Foo<T>);, which the parser treats as a
                    # function call but is actually a declaration.
                    name = '???'
        modifiers = []
        type_name = []
        other_tokens = []
        templated_types = []
        i = 0
        end = len(parts)
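        # Second pass: keywords become modifiers, '<' starts template
        # arguments, and the remaining tokens either extend the type name
        # or are set aside in other_tokens.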
        while i < end:
            p = parts[i]
            if keywords.IsKeyword(p.name):
                modifiers.append(p.name)
            elif p.name == '<':
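                # Gather everything up to the matching '>' and parse the
                # enclosed tokens into template argument types.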
                templated_tokens, new_end = self._GetTemplateEnd(parts, i+1)
                templated_types = self.ToType(templated_tokens)
                i = new_end - 1
                # Don't add a spurious :: to data members being initialized.
                next_index = i + 1
                if next_index < end and parts[next_index].name == '::':
                    i += 1
            elif p.name in ('[', ']', '='):
                # These are handled elsewhere.
                other_tokens.append(p)
            elif p.name not in ('*', '&', '>'):
                # Ensure that names have a space between them.
                if (type_name and type_name[-1].token_type == tokenize.NAME and
                    p.token_type == tokenize.NAME):
                    type_name.append(tokenize.Token(tokenize.SYNTAX, ' ', 0, 0))
                type_name.append(p)
            else:
                other_tokens.append(p)
            i += 1
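        # Collapse the collected type tokens into a single string.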
        type_name = ''.join([t.name for t in type_name])
        return name, type_name, templated_types, modifiers, default, other_tokens
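
A minimal usage sketch (not from ast.py) of the token stream this method
consumes and the tuple it produces. The Token stand-in and the NAME/SYNTAX
constants below are simplified, hypothetical versions of the ones in the
sibling cpp/tokenize.py module; the real Token also carries source offsets.

    from collections import namedtuple

    # Hypothetical stand-in for tokenize.Token (the real class also
    # records start/end positions).
    Token = namedtuple('Token', 'token_type name')
    NAME, SYNTAX = 'NAME', 'SYNTAX'

    # Token stream for the C++ declaration:  const Foo x = kDefault
    parts = [Token(NAME, 'const'),
             Token(NAME, 'Foo'),
             Token(NAME, 'x'),
             Token(SYNTAX, '='),
             Token(NAME, 'kDefault')]

    # With needs_name_removed=True, DeclarationToParts would return:
    #   name            -> 'x'
    #   type_name       -> 'Foo'
    #   templated_types -> []
    #   modifiers       -> ['const']   ('const' is a C++ keyword)
    #   default         -> [Token(NAME, 'kDefault')]
    #   other_tokens    -> []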