From 1086c9402fc9a87bca4588a754b59addc2f9b8b5 Mon Sep 17 00:00:00 2001
From: Luis Felipe Mileo
Date: Wed, 22 Jul 2020 07:11:05 -0300
Subject: [PATCH 1/3] [NEW] Nota Paulistana v2

Signed-off-by: Luis Felipe Mileo
---
 nfselib/paulistana/__init__.py | 0
 .../v02/PedidoCancelamentoLote_v01.py | 1058 ++++
 .../v02/PedidoCancelamentoNFTS_v01.py | 1200 +++++
 .../v02/PedidoCancelamentoNFe_v01.py | 1161 +++++
 .../paulistana/v02/PedidoConsultaCNPJ_v01.py | 1058 ++++
 .../paulistana/v02/PedidoConsultaCPOM_v01.py | 1159 +++++
 .../v02/PedidoConsultaEmissaoNFSE_v01.py | 1160 +++++
 .../PedidoConsultaInformacoesLoteNFTS_v01.py | 1160 +++++
 .../v02/PedidoConsultaLoteNFTS_v01.py | 1160 +++++
 .../paulistana/v02/PedidoConsultaLote_v01.py | 1057 ++++
 .../paulistana/v02/PedidoConsultaNFTS_v01.py | 1163 +++++
 .../v02/PedidoConsultaNFePeriodo_v01.py | 1102 ++++
 .../paulistana/v02/PedidoConsultaNFe_v01.py | 1145 +++++
 .../paulistana/v02/PedidoEnvioLoteNFTS_v01.py | 1182 +++++
 .../paulistana/v02/PedidoEnvioLoteRPS_v01.py | 1128 +++++
 nfselib/paulistana/v02/PedidoEnvioNFTS_v01.py | 1084 ++++
 nfselib/paulistana/v02/PedidoEnvioRPS_v01.py | 1057 ++++
 .../v02/PedidoInformacoesLote_v01.py | 1066 ++++
 .../v02/RetornoCancelamentoNFTS_v01.py | 990 ++++
 .../v02/RetornoCancelamentoNFe_v01.py | 1064 ++++
 .../paulistana/v02/RetornoConsultaCNPJ_v01.py | 1166 +++++
 .../paulistana/v02/RetornoConsultaCPOM_v01.py | 987 ++++
 .../v02/RetornoConsultaEmissaoNFSE_v01.py | 987 ++++
 .../RetornoConsultaInformacoesLoteNFTS_v01.py | 990 ++++
 .../paulistana/v02/RetornoConsultaNFTS_v01.py | 992 ++++
 nfselib/paulistana/v02/RetornoConsulta_v01.py | 1077 ++++
 .../v02/RetornoEnvioLoteNFTS_v01.py | 987 ++++
 .../paulistana/v02/RetornoEnvioLoteRPS_v01.py | 1085 ++++
 .../paulistana/v02/RetornoEnvioNFTS_v01.py | 999 ++++
 nfselib/paulistana/v02/RetornoEnvioRPS_v01.py | 1072 ++++
 .../v02/RetornoInformacoesLote_v01.py | 1073 ++++
 nfselib/paulistana/v02/TiposNFTS_v01.py | 4413 +++++++++++++++++
 nfselib/paulistana/v02/TiposNFe_v01.py | 3466 +++++++++++++
 nfselib/paulistana/v02/__init__.py | 0
 .../paulistana/v02/xmldsig-core-schema_v01.py | 2009 ++++++++
 .../v02/PedidoCancelamentoLote_v01.xsd | 47 +
 .../v02/PedidoCancelamentoNFTS_v01.xsd | 71 +
 .../v02/PedidoCancelamentoNFe_v01.xsd | 66 +
 .../paulistana/v02/PedidoConsultaCNPJ_v01.xsd | 47 +
 .../paulistana/v02/PedidoConsultaCPOM_v01.xsd | 57 +
 .../v02/PedidoConsultaEmissaoNFSE_v01.xsd | 57 +
 .../PedidoConsultaInformacoesLoteNFTS_v01.xsd | 56 +
 .../v02/PedidoConsultaLoteNFTS_v01.xsd | 56 +
 .../paulistana/v02/PedidoConsultaLote_v01.xsd | 47 +
 .../paulistana/v02/PedidoConsultaNFTS_v01.xsd | 59 +
 .../v02/PedidoConsultaNFePeriodo_v01.xsd | 69 +
 .../paulistana/v02/PedidoConsultaNFe_v01.xsd | 53 +
 .../v02/PedidoEnvioLoteNFTS_v01.xsd | 79 +
 .../paulistana/v02/PedidoEnvioLoteRPS_v01.xsd | 77 +
 .../paulistana/v02/PedidoEnvioNFTS_v01.xsd | 49 +
 schemas/paulistana/v02/PedidoEnvioRPS_v01.xsd | 47 +
 .../v02/PedidoInformacoesLote_v01.xsd | 52 +
 .../v02/RetornoCancelamentoNFTS_v01.xsd | 28 +
 .../v02/RetornoCancelamentoNFe_v01.xsd | 47 +
 .../v02/RetornoConsultaCNPJ_v01.xsd | 63 +
 .../v02/RetornoConsultaCPOM_v01.xsd | 23 +
 .../v02/RetornoConsultaEmissaoNFSE_v01.xsd | 23 +
 ...RetornoConsultaInformacoesLoteNFTS_v01.xsd | 25 +
 .../v02/RetornoConsultaNFTS_v01.xsd | 25 +
 .../paulistana/v02/RetornoConsulta_v01.xsd | 52 +
 .../v02/RetornoEnvioLoteNFTS_v01.xsd | 24 +
 .../v02/RetornoEnvioLoteRPS_v01.xsd | 57 +
 .../paulistana/v02/RetornoEnvioNFTS_v01.xsd | 29 +
 .../paulistana/v02/RetornoEnvioRPS_v01.xsd | 52 +
 .../v02/RetornoInformacoesLote_v01.xsd | 52 +
 schemas/paulistana/v02/TiposNFTS_v01.xsd | 1113 +++++
 schemas/paulistana/v02/TiposNFe_v01.xsd | 975 ++++
 .../v02/xmldsig-core-schema_v01.xsd | 95 +
 68 files changed, 46129 insertions(+)
 create mode 100644 nfselib/paulistana/__init__.py
 create mode 100644 nfselib/paulistana/v02/PedidoCancelamentoLote_v01.py
 create mode 100644 nfselib/paulistana/v02/PedidoCancelamentoNFTS_v01.py
 create mode 100644 nfselib/paulistana/v02/PedidoCancelamentoNFe_v01.py
 create mode 100644 nfselib/paulistana/v02/PedidoConsultaCNPJ_v01.py
 create mode 100644 nfselib/paulistana/v02/PedidoConsultaCPOM_v01.py
 create mode 100644 nfselib/paulistana/v02/PedidoConsultaEmissaoNFSE_v01.py
 create mode 100644 nfselib/paulistana/v02/PedidoConsultaInformacoesLoteNFTS_v01.py
 create mode 100644 nfselib/paulistana/v02/PedidoConsultaLoteNFTS_v01.py
 create mode 100644 nfselib/paulistana/v02/PedidoConsultaLote_v01.py
 create mode 100644 nfselib/paulistana/v02/PedidoConsultaNFTS_v01.py
 create mode 100644 nfselib/paulistana/v02/PedidoConsultaNFePeriodo_v01.py
 create mode 100644 nfselib/paulistana/v02/PedidoConsultaNFe_v01.py
 create mode 100644 nfselib/paulistana/v02/PedidoEnvioLoteNFTS_v01.py
 create mode 100644 nfselib/paulistana/v02/PedidoEnvioLoteRPS_v01.py
 create mode 100644 nfselib/paulistana/v02/PedidoEnvioNFTS_v01.py
 create mode 100644 nfselib/paulistana/v02/PedidoEnvioRPS_v01.py
 create mode 100644 nfselib/paulistana/v02/PedidoInformacoesLote_v01.py
 create mode 100644 nfselib/paulistana/v02/RetornoCancelamentoNFTS_v01.py
 create mode 100644 nfselib/paulistana/v02/RetornoCancelamentoNFe_v01.py
 create mode 100644 nfselib/paulistana/v02/RetornoConsultaCNPJ_v01.py
 create mode 100644 nfselib/paulistana/v02/RetornoConsultaCPOM_v01.py
 create mode 100644 nfselib/paulistana/v02/RetornoConsultaEmissaoNFSE_v01.py
 create mode 100644 nfselib/paulistana/v02/RetornoConsultaInformacoesLoteNFTS_v01.py
 create mode 100644 nfselib/paulistana/v02/RetornoConsultaNFTS_v01.py
 create mode 100644 nfselib/paulistana/v02/RetornoConsulta_v01.py
 create mode 100644 nfselib/paulistana/v02/RetornoEnvioLoteNFTS_v01.py
 create mode 100644 nfselib/paulistana/v02/RetornoEnvioLoteRPS_v01.py
 create mode 100644 nfselib/paulistana/v02/RetornoEnvioNFTS_v01.py
 create mode 100644 nfselib/paulistana/v02/RetornoEnvioRPS_v01.py
 create mode 100644 nfselib/paulistana/v02/RetornoInformacoesLote_v01.py
 create mode 100644 nfselib/paulistana/v02/TiposNFTS_v01.py
 create mode 100644 nfselib/paulistana/v02/TiposNFe_v01.py
 create mode 100644 nfselib/paulistana/v02/__init__.py
 create mode 100644 nfselib/paulistana/v02/xmldsig-core-schema_v01.py
 create mode 100644 schemas/paulistana/v02/PedidoCancelamentoLote_v01.xsd
 create mode 100644 schemas/paulistana/v02/PedidoCancelamentoNFTS_v01.xsd
 create mode 100644 schemas/paulistana/v02/PedidoCancelamentoNFe_v01.xsd
 create mode 100644 schemas/paulistana/v02/PedidoConsultaCNPJ_v01.xsd
 create mode 100644 schemas/paulistana/v02/PedidoConsultaCPOM_v01.xsd
 create mode 100644 schemas/paulistana/v02/PedidoConsultaEmissaoNFSE_v01.xsd
 create mode 100644 schemas/paulistana/v02/PedidoConsultaInformacoesLoteNFTS_v01.xsd
 create mode 100644 schemas/paulistana/v02/PedidoConsultaLoteNFTS_v01.xsd
 create mode 100644 schemas/paulistana/v02/PedidoConsultaLote_v01.xsd
 create mode 100644 schemas/paulistana/v02/PedidoConsultaNFTS_v01.xsd
 create mode 100644 schemas/paulistana/v02/PedidoConsultaNFePeriodo_v01.xsd
 create mode 100644 schemas/paulistana/v02/PedidoConsultaNFe_v01.xsd
 create mode 100644 schemas/paulistana/v02/PedidoEnvioLoteNFTS_v01.xsd
 create mode 100644 schemas/paulistana/v02/PedidoEnvioLoteRPS_v01.xsd
 create mode 100644 schemas/paulistana/v02/PedidoEnvioNFTS_v01.xsd
 create mode 100644 schemas/paulistana/v02/PedidoEnvioRPS_v01.xsd
 create mode 100644 schemas/paulistana/v02/PedidoInformacoesLote_v01.xsd
 create mode 100644 schemas/paulistana/v02/RetornoCancelamentoNFTS_v01.xsd
 create mode 100644 schemas/paulistana/v02/RetornoCancelamentoNFe_v01.xsd
 create mode 100644 schemas/paulistana/v02/RetornoConsultaCNPJ_v01.xsd
 create mode 100644 schemas/paulistana/v02/RetornoConsultaCPOM_v01.xsd
 create mode 100644 schemas/paulistana/v02/RetornoConsultaEmissaoNFSE_v01.xsd
 create mode 100644 schemas/paulistana/v02/RetornoConsultaInformacoesLoteNFTS_v01.xsd
 create mode 100644 schemas/paulistana/v02/RetornoConsultaNFTS_v01.xsd
 create mode 100644 schemas/paulistana/v02/RetornoConsulta_v01.xsd
 create mode 100644 schemas/paulistana/v02/RetornoEnvioLoteNFTS_v01.xsd
 create mode 100644 schemas/paulistana/v02/RetornoEnvioLoteRPS_v01.xsd
 create mode 100644 schemas/paulistana/v02/RetornoEnvioNFTS_v01.xsd
 create mode 100644 schemas/paulistana/v02/RetornoEnvioRPS_v01.xsd
 create mode 100644 schemas/paulistana/v02/RetornoInformacoesLote_v01.xsd
 create mode 100644 schemas/paulistana/v02/TiposNFTS_v01.xsd
 create mode 100644 schemas/paulistana/v02/TiposNFe_v01.xsd
 create mode 100644 schemas/paulistana/v02/xmldsig-core-schema_v01.xsd

diff --git a/nfselib/paulistana/__init__.py b/nfselib/paulistana/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/nfselib/paulistana/v02/PedidoCancelamentoLote_v01.py b/nfselib/paulistana/v02/PedidoCancelamentoLote_v01.py
new file mode 100644
index 0000000..e9da957
--- /dev/null
+++ b/nfselib/paulistana/v02/PedidoCancelamentoLote_v01.py
@@ -0,0 +1,1058 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+#
+# Generated Wed Jul 22 06:40:29 2020 by generateDS.py version 2.30.15.
+# Python 3.8.3 (default, May 17 2020, 18:15:42) [GCC 10.1.0]
+#
+# Command line options:
+#   ('--no-namespace-defs', '')
+#   ('--no-collect-includes', '')
+#   ('--use-getter-setter', 'none')
+#   ('-f', '')
+#   ('-o', './paulistanalib/v02/PedidoCancelamentoLote_v01.py')
+#
+# Command line arguments:
+#   ./process_includes/PedidoCancelamentoLote_v01.xsd
+#
+# Command line:
+#   /home/mileo/Projects/oca12/bin/erpbrasil-edoc-gen-generate-python --no-namespace-defs --no-collect-includes --use-getter-setter="none" -f -o "./paulistanalib/v02/PedidoCancelamentoLote_v01.py" ./process_includes/PedidoCancelamentoLote_v01.xsd
+#
+# Current working directory (os.getcwd()):
+#   schemas
+#
+
+import os
+import sys
+import re as re_
+import base64
+import datetime as datetime_
+import warnings as warnings_
+try:
+    from lxml import etree as etree_
+except ImportError:
+    from xml.etree import ElementTree as etree_
+
+
+Validate_simpletypes_ = True
+if sys.version_info.major == 2:
+    BaseStrType_ = basestring
+else:
+    BaseStrType_ = str
+
+
+def parsexml_(infile, parser=None, **kwargs):
+    if parser is None:
+        # Use the lxml ElementTree compatible parser so that, e.g.,
+        # we ignore comments.
+ try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. +# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ImportError: + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ImportError: + GenerateDSNamespaceTypePrefixes_ = {} + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. 
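+#
+# For illustration only, a hypothetical generatedssuper.py (this comment is
+# not part of the generateDS output): the class defined there is used
+# *instead of* the fallback GeneratedsSuper below, so a real replacement must
+# supply every gds_* helper the generated element classes call -- for example
+# by copying the fallback class below and editing only the methods of
+# interest:
+#
+# # File: generatedssuper.py
+#
+# class GeneratedsSuper(object):
+#     # ... all other gds_* helpers copied unchanged from the fallback ...
+#     def gds_format_string(self, input_data, input_name=''):
+#         # example customization: collapse runs of whitespace before export
+#         return ' '.join(input_data.split())
+#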
+ +try: + from generatedssuper import GeneratedsSuper +except ImportError as exp: + + class GeneratedsSuper(object): + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data) + def gds_validate_base64(self, input_data, node=None, input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % input_data + def gds_validate_integer(self, input_data, node=None, input_name=''): + return input_data + def gds_format_integer_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integers') + return values + def gds_format_float(self, input_data, input_name=''): + return ('%.15f' % input_data).rstrip('0') + def gds_validate_float(self, input_data, node=None, input_name=''): + return input_data + def gds_format_float_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of floats') + return values + def gds_format_double(self, input_data, input_name=''): + return '%e' % input_data + def gds_validate_double(self, input_data, node=None, input_name=''): + return input_data + def gds_format_double_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of doubles') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_validate_boolean(self, input_data, node=None, input_name=''): + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + if value not in ('true', '1', 'false', '0', ): + raise_parse_error( + node, + 'Requires sequence of booleans ' + '("true", "1", "false", "0")') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + 
input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + 
total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + return None + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + if type(self) != type(other): + return False + return self.__dict__ == other.__dict__ + def __ne__(self, other): + return not self.__eq__(other) + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. 
+# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. +# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(' ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." + if not inStr: + return '' + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s2 = '' + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos:mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start():mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + return s1 + + +def quote_attrib(inStr): + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s1 = s1.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', """) + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find('\n') == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find('\n') == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = '' + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(':') + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get('{%s}%s' % (namespace, name, )) + return value + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + def getCategory(self): + return self.category + def getContenttype(self, content_type): + return self.content_type + def getValue(self): + return self.value + def getName(self): + return self.name + def export(self, outfile, level, name, namespace, + pretty_print=True): + if self.category == 
MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def set_data_type(self, data_type): 
self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + +# +# Data representation classes. +# + + +class PedidoCancelamentoLote(GeneratedsSuper): + """Schema utilizado para PEDIDO de cancelamento de lote.Este Schema XML + é utilizado pelos prestadores de serviços cancelarem as NFS-e + geradas a partir de um lote de RPS.""" + subclass = None + superclass = None + def __init__(self, Cabecalho=None, Signature=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.Cabecalho = Cabecalho + self.Signature = Signature + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, PedidoCancelamentoLote) + if subclass is not None: + return subclass(*args_, **kwargs_) + if PedidoCancelamentoLote.subclass: + return PedidoCancelamentoLote.subclass(*args_, **kwargs_) + else: + return PedidoCancelamentoLote(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): + if ( + self.Cabecalho is not None or + self.Signature is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PedidoCancelamentoLote', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('PedidoCancelamentoLote') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PedidoCancelamentoLote') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='PedidoCancelamentoLote', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PedidoCancelamentoLote'): + pass + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PedidoCancelamentoLote', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Cabecalho is not None: + self.Cabecalho.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Cabecalho', pretty_print=pretty_print) + if self.Signature is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sSignature>%s%s' % (namespaceprefix_ , 
self.gds_encode(self.gds_format_string(quote_xml(self.Signature), input_name='Signature')), namespaceprefix_ , eol_)) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + pass + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'Cabecalho': + obj_ = CabecalhoType.factory(parent_object_=self) + obj_.build(child_) + self.Cabecalho = obj_ + obj_.original_tagname_ = 'Cabecalho' + elif nodeName_ == 'Signature': + Signature_ = child_.text + Signature_ = self.gds_validate_string(Signature_, node, 'Signature') + self.Signature = Signature_ +# end class PedidoCancelamentoLote + + +class CabecalhoType(GeneratedsSuper): + """Cabeçalho do pedido de cancelamento de lote.Informe a Versão do + Schema XML utilizado.""" + subclass = None + superclass = None + def __init__(self, Versao='1', CPFCNPJRemetente=None, NumeroLote=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.Versao = _cast(None, Versao) + self.CPFCNPJRemetente = CPFCNPJRemetente + self.NumeroLote = NumeroLote + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, CabecalhoType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if CabecalhoType.subclass: + return CabecalhoType.subclass(*args_, **kwargs_) + else: + return CabecalhoType(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): + if ( + self.CPFCNPJRemetente is not None or + self.NumeroLote is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CabecalhoType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('CabecalhoType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CabecalhoType') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='CabecalhoType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='CabecalhoType'): + if self.Versao != "1" and 'Versao' not in already_processed: + already_processed.add('Versao') + outfile.write(' Versao=%s' % (quote_attrib(self.Versao), )) + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CabecalhoType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.CPFCNPJRemetente is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sCPFCNPJRemetente>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CPFCNPJRemetente), 
input_name='CPFCNPJRemetente')), namespaceprefix_ , eol_)) + if self.NumeroLote is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sNumeroLote>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.NumeroLote), input_name='NumeroLote')), namespaceprefix_ , eol_)) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('Versao', node) + if value is not None and 'Versao' not in already_processed: + already_processed.add('Versao') + self.Versao = value + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'CPFCNPJRemetente': + CPFCNPJRemetente_ = child_.text + CPFCNPJRemetente_ = self.gds_validate_string(CPFCNPJRemetente_, node, 'CPFCNPJRemetente') + self.CPFCNPJRemetente = CPFCNPJRemetente_ + elif nodeName_ == 'NumeroLote': + NumeroLote_ = child_.text + NumeroLote_ = self.gds_validate_string(NumeroLote_, node, 'NumeroLote') + self.NumeroLote = NumeroLote_ +# end class CabecalhoType + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + rootClass = GDSClassesMapping.get(tag) + if rootClass is None: + rootClass = globals().get(tag) + return tag, rootClass + + +def parse(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PedidoCancelamentoLote' + rootClass = PedidoCancelamentoLote + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + doc = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='', + pretty_print=True) + return rootObj + + +def parseEtree(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PedidoCancelamentoLote' + rootClass = PedidoCancelamentoLote + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + doc = None + mapping = {} + rootElement = rootObj.to_etree(None, name_=rootTag, mapping_=mapping) + reverse_mapping = rootObj.gds_reverse_node_mapping(mapping) + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(content) + sys.stdout.write('\n') + return rootObj, rootElement, mapping, reverse_mapping + + +def parseString(inString, silence=False): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. 
+ ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PedidoCancelamentoLote' + rootClass = PedidoCancelamentoLote + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + return rootObj + + +def parseLiteral(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PedidoCancelamentoLote' + rootClass = PedidoCancelamentoLote + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + doc = None + if not silence: + sys.stdout.write('#from PedidoCancelamentoLote_v01 import *\n\n') + sys.stdout.write('import PedidoCancelamentoLote_v01 as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + + +__all__ = [ + "CabecalhoType", + "PedidoCancelamentoLote" +] diff --git a/nfselib/paulistana/v02/PedidoCancelamentoNFTS_v01.py b/nfselib/paulistana/v02/PedidoCancelamentoNFTS_v01.py new file mode 100644 index 0000000..c5084a9 --- /dev/null +++ b/nfselib/paulistana/v02/PedidoCancelamentoNFTS_v01.py @@ -0,0 +1,1200 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Jul 22 06:40:30 2020 by generateDS.py version 2.30.15. +# Python 3.8.3 (default, May 17 2020, 18:15:42) [GCC 10.1.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('--no-collect-includes', '') +# ('--use-getter-setter', 'none') +# ('-f', '') +# ('-o', './paulistanalib/v02/PedidoCancelamentoNFTS_v01.py') +# +# Command line arguments: +# ./process_includes/PedidoCancelamentoNFTS_v01.xsd +# +# Command line: +# /home/mileo/Projects/oca12/bin/erpbrasil-edoc-gen-generate-python --no-namespace-defs --no-collect-includes --use-getter-setter="none" -f -o "./paulistanalib/v02/PedidoCancelamentoNFTS_v01.py" ./process_includes/PedidoCancelamentoNFTS_v01.xsd +# +# Current working directory (os.getcwd()): +# schemas +# + +import os +import sys +import re as re_ +import base64 +import datetime as datetime_ +import warnings as warnings_ +try: + from lxml import etree as etree_ +except ImportError: + from xml.etree import ElementTree as etree_ + + +Validate_simpletypes_ = True +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. 
+ try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. +# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ImportError: + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ImportError: + GenerateDSNamespaceTypePrefixes_ = {} + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. 
+ +try: + from generatedssuper import GeneratedsSuper +except ImportError as exp: + + class GeneratedsSuper(object): + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data) + def gds_validate_base64(self, input_data, node=None, input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % input_data + def gds_validate_integer(self, input_data, node=None, input_name=''): + return input_data + def gds_format_integer_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integers') + return values + def gds_format_float(self, input_data, input_name=''): + return ('%.15f' % input_data).rstrip('0') + def gds_validate_float(self, input_data, node=None, input_name=''): + return input_data + def gds_format_float_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of floats') + return values + def gds_format_double(self, input_data, input_name=''): + return '%e' % input_data + def gds_validate_double(self, input_data, node=None, input_name=''): + return input_data + def gds_format_double_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of doubles') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_validate_boolean(self, input_data, node=None, input_name=''): + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + if value not in ('true', '1', 'false', '0', ): + raise_parse_error( + node, + 'Requires sequence of booleans ' + '("true", "1", "false", "0")') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + 
input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + 
total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + return None + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + if type(self) != type(other): + return False + return self.__dict__ == other.__dict__ + def __ne__(self, other): + return not self.__eq__(other) + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. 
+# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. +# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(' ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." + if not inStr: + return '' + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s2 = '' + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos:mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start():mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + return s1 + + +def quote_attrib(inStr): + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s1 = s1.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', """) + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find('\n') == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find('\n') == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = '' + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(':') + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get('{%s}%s' % (namespace, name, )) + return value + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + def getCategory(self): + return self.category + def getContenttype(self, content_type): + return self.content_type + def getValue(self): + return self.value + def getName(self): + return self.name + def export(self, outfile, level, name, namespace, + pretty_print=True): + if self.category == 
MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def set_data_type(self, data_type): 
self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + +# +# Data representation classes. +# + + +class tpOpcaoSimples(object): + _0='0' + _1='1' + _2='2' + _3='3' + + +class tpStatusNFe(object): + N='N' + C='C' + E='E' + + +class tpTipoRPS(object): + RPS='RPS' + RPSM='RPS-M' + RPSC='RPS-C' + + +class PedidoCancelamentoNFTS(GeneratedsSuper): + """Schema utilizado para PEDIDO de cancelamento de uma NFTS.Este Schema + XML é utilizado para os tomadores/intermediários de serviços + cancelarem uma NFTS.""" + subclass = None + superclass = None + def __init__(self, Cabecalho=None, DetalheNFTS=None, Signature=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.Cabecalho = Cabecalho + if DetalheNFTS is None: + self.DetalheNFTS = [] + else: + self.DetalheNFTS = DetalheNFTS + self.Signature = Signature + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, PedidoCancelamentoNFTS) + if subclass is not None: + return subclass(*args_, **kwargs_) + if PedidoCancelamentoNFTS.subclass: + return PedidoCancelamentoNFTS.subclass(*args_, **kwargs_) + else: + return PedidoCancelamentoNFTS(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): + if ( + self.Cabecalho is not None or + self.DetalheNFTS or + self.Signature is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PedidoCancelamentoNFTS', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('PedidoCancelamentoNFTS') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PedidoCancelamentoNFTS') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='PedidoCancelamentoNFTS', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PedidoCancelamentoNFTS'): + pass + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PedidoCancelamentoNFTS', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if 
self.Cabecalho is not None: + self.Cabecalho.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Cabecalho', pretty_print=pretty_print) + for DetalheNFTS_ in self.DetalheNFTS: + DetalheNFTS_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='DetalheNFTS', pretty_print=pretty_print) + if self.Signature is not None: + self.Signature.export(outfile, level, namespaceprefix_='ds:', namespacedef_='', name_='Signature', pretty_print=pretty_print) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + pass + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'Cabecalho': + obj_ = CabecalhoType.factory(parent_object_=self) + obj_.build(child_) + self.Cabecalho = obj_ + obj_.original_tagname_ = 'Cabecalho' + elif nodeName_ == 'DetalheNFTS': + obj_ = DetalheNFTSType.factory(parent_object_=self) + obj_.build(child_) + self.DetalheNFTS.append(obj_) + obj_.original_tagname_ = 'DetalheNFTS' + elif nodeName_ == 'Signature': + obj_ = SignatureType.factory(parent_object_=self) + obj_.build(child_) + self.Signature = obj_ + obj_.original_tagname_ = 'Signature' +# end class PedidoCancelamentoNFTS + + +class CabecalhoType(GeneratedsSuper): + """Cabeçalho do pedido de cancelamento de uma NFTS.Informe a Versão do + Schema XML utilizado.""" + subclass = None + superclass = None + def __init__(self, Versao='1', id=None, Remetente=None, transacao=True, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.Versao = _cast(None, Versao) + self.id = _cast(None, id) + self.Remetente = Remetente + self.transacao = transacao + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, CabecalhoType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if CabecalhoType.subclass: + return CabecalhoType.subclass(*args_, **kwargs_) + else: + return CabecalhoType(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): + if ( + self.Remetente is not None or + not self.transacao + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CabecalhoType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('CabecalhoType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CabecalhoType') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='CabecalhoType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='CabecalhoType'): + if self.Versao != 1 and 'Versao' not in already_processed: + 
already_processed.add('Versao') + outfile.write(' Versao=%s' % (quote_attrib(self.Versao), )) + if self.id is not None and 'id' not in already_processed: + already_processed.add('id') + outfile.write(' id=%s' % (quote_attrib(self.id), )) + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CabecalhoType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Remetente is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sRemetente>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Remetente), input_name='Remetente')), namespaceprefix_ , eol_)) + if self.transacao is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%stransacao>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.transacao, input_name='transacao'), namespaceprefix_ , eol_)) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('Versao', node) + if value is not None and 'Versao' not in already_processed: + already_processed.add('Versao') + self.Versao = value + value = find_attr_value_('id', node) + if value is not None and 'id' not in already_processed: + already_processed.add('id') + self.id = value + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'Remetente': + Remetente_ = child_.text + Remetente_ = self.gds_validate_string(Remetente_, node, 'Remetente') + self.Remetente = Remetente_ + elif nodeName_ == 'transacao': + sval_ = child_.text + if sval_ in ('true', '1'): + ival_ = True + elif sval_ in ('false', '0'): + ival_ = False + else: + raise_parse_error(child_, 'requires boolean') + ival_ = self.gds_validate_boolean(ival_, node, 'transacao') + self.transacao = ival_ +# end class CabecalhoType + + +class DetalheNFTSType(GeneratedsSuper): + """Detalhe do pedido de cancelamento de NFTS.""" + subclass = None + superclass = None + def __init__(self, ChaveNFTS=None, AssinaturaCancelamento=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ChaveNFTS = ChaveNFTS + self.AssinaturaCancelamento = AssinaturaCancelamento + self.validate_tpAssinaturaCancelamento(self.AssinaturaCancelamento) + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, DetalheNFTSType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if DetalheNFTSType.subclass: + return DetalheNFTSType.subclass(*args_, **kwargs_) + else: + return DetalheNFTSType(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_tpAssinaturaCancelamento(self, value): + # Validate type tpAssinaturaCancelamento, a restriction on xs:base64Binary. 
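+        # This validator is generated as an empty stub; a hand-written
+        # subclass could add its own check here, for example (illustrative
+        # only, not required by the schema module):
+        #     if value is not None and not isinstance(value, bytes):
+        #         warnings_.warn('AssinaturaCancelamento: expected decoded base64 bytes')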
+ pass + def hasContent_(self): + if ( + self.ChaveNFTS is not None or + self.AssinaturaCancelamento is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='DetalheNFTSType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('DetalheNFTSType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='DetalheNFTSType') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='DetalheNFTSType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='DetalheNFTSType'): + pass + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='DetalheNFTSType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.ChaveNFTS is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sChaveNFTS>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ChaveNFTS), input_name='ChaveNFTS')), namespaceprefix_ , eol_)) + if self.AssinaturaCancelamento is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sAssinaturaCancelamento>%s%s' % (namespaceprefix_ , self.gds_format_base64(self.AssinaturaCancelamento, input_name='AssinaturaCancelamento'), namespaceprefix_ , eol_)) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + pass + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'ChaveNFTS': + ChaveNFTS_ = child_.text + ChaveNFTS_ = self.gds_validate_string(ChaveNFTS_, node, 'ChaveNFTS') + self.ChaveNFTS = ChaveNFTS_ + elif nodeName_ == 'AssinaturaCancelamento': + sval_ = child_.text + if sval_ is not None: + try: + bval_ = base64.b64decode(sval_) + except (TypeError, ValueError) as exp: + raise_parse_error(child_, 'requires base64 encoded string: %s' % exp) + bval_ = self.gds_validate_base64(bval_, node, 'AssinaturaCancelamento') + else: + bval_ = None + self.AssinaturaCancelamento = bval_ + # validate type tpAssinaturaCancelamento + self.validate_tpAssinaturaCancelamento(self.AssinaturaCancelamento) +# end class DetalheNFTSType + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + rootClass = GDSClassesMapping.get(tag) + if rootClass is None: + rootClass = globals().get(tag) + return tag, rootClass + + +def parse(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = 
get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PedidoCancelamentoNFTS' + rootClass = PedidoCancelamentoNFTS + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + doc = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='', + pretty_print=True) + return rootObj + + +def parseEtree(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PedidoCancelamentoNFTS' + rootClass = PedidoCancelamentoNFTS + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + doc = None + mapping = {} + rootElement = rootObj.to_etree(None, name_=rootTag, mapping_=mapping) + reverse_mapping = rootObj.gds_reverse_node_mapping(mapping) + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(content) + sys.stdout.write('\n') + return rootObj, rootElement, mapping, reverse_mapping + + +def parseString(inString, silence=False): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. + ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PedidoCancelamentoNFTS' + rootClass = PedidoCancelamentoNFTS + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + return rootObj + + +def parseLiteral(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PedidoCancelamentoNFTS' + rootClass = PedidoCancelamentoNFTS + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + doc = None + if not silence: + sys.stdout.write('#from PedidoCancelamentoNFTS_v01 import *\n\n') + sys.stdout.write('import PedidoCancelamentoNFTS_v01 as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + + +__all__ = [ + "CabecalhoType", + "DetalheNFTSType", + "PedidoCancelamentoNFTS" +] diff --git a/nfselib/paulistana/v02/PedidoCancelamentoNFe_v01.py b/nfselib/paulistana/v02/PedidoCancelamentoNFe_v01.py new file mode 100644 index 0000000..3df4a4a --- /dev/null +++ b/nfselib/paulistana/v02/PedidoCancelamentoNFe_v01.py @@ -0,0 +1,1161 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Jul 22 06:40:29 2020 by generateDS.py version 2.30.15. 
+# Python 3.8.3 (default, May 17 2020, 18:15:42) [GCC 10.1.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('--no-collect-includes', '') +# ('--use-getter-setter', 'none') +# ('-f', '') +# ('-o', './paulistanalib/v02/PedidoCancelamentoNFe_v01.py') +# +# Command line arguments: +# ./process_includes/PedidoCancelamentoNFe_v01.xsd +# +# Command line: +# /home/mileo/Projects/oca12/bin/erpbrasil-edoc-gen-generate-python --no-namespace-defs --no-collect-includes --use-getter-setter="none" -f -o "./paulistanalib/v02/PedidoCancelamentoNFe_v01.py" ./process_includes/PedidoCancelamentoNFe_v01.xsd +# +# Current working directory (os.getcwd()): +# schemas +# + +import os +import sys +import re as re_ +import base64 +import datetime as datetime_ +import warnings as warnings_ +try: + from lxml import etree as etree_ +except ImportError: + from xml.etree import ElementTree as etree_ + + +Validate_simpletypes_ = True +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ImportError: + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ImportError: + GenerateDSNamespaceTypePrefixes_ = {} + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ImportError as exp: + + class GeneratedsSuper(object): + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data) + def gds_validate_base64(self, input_data, node=None, input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % input_data + def gds_validate_integer(self, input_data, node=None, input_name=''): + return input_data + def gds_format_integer_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integers') + return values + def gds_format_float(self, input_data, input_name=''): + return ('%.15f' % input_data).rstrip('0') + def gds_validate_float(self, input_data, node=None, input_name=''): + return input_data + def gds_format_float_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of floats') + return values + def gds_format_double(self, input_data, input_name=''): + return '%e' % input_data + def gds_validate_double(self, input_data, node=None, input_name=''): + return input_data + def gds_format_double_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of doubles') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_validate_boolean(self, input_data, node=None, input_name=''): + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, 
input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + if value not in ('true', '1', 'false', '0', ): + raise_parse_error( + node, + 'Requires sequence of booleans ' + '("true", "1", "false", "0")') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' 
+ time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. 
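+            # Example (illustrative values): with
+            #     patterns = [['[0-9]{11}', '[0-9]{14}']]
+            # a target passes only if one alternative in each inner list
+            # matches it in full, e.g. an 11- or 14-digit string here.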
+ found1 = True + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + return None + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + if type(self) != type(other): + return False + return self.__dict__ == other.__dict__ + def __ne__(self, other): + return not self.__eq__(other) + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. 
+#
+
+## from IPython.Shell import IPShellEmbed
+## args = ''
+## ipshell = IPShellEmbed(args,
+##     banner = 'Dropping into IPython',
+##     exit_msg = 'Leaving Interpreter, back to program.')
+
+# Then use the following line where and when you want to drop into the
+# IPython shell:
+#    ipshell('<some message> -- Entering ipshell.\nHit Ctrl-D to exit')
+
+#
+# Globals
+#
+
+ExternalEncoding = ''
+Tag_pattern_ = re_.compile(r'({.*})?(.*)')
+String_cleanup_pat_ = re_.compile(r"[\n\r\s]+")
+Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)')
+CDATA_pattern_ = re_.compile(r"<!\[CDATA\[.*?\]\]>", re_.DOTALL)
+
+# Change this to redirect the generated superclass module to use a
+# specific subclass module.
+CurrentSubclassModule_ = None
+
+#
+# Support/utility functions.
+#
+
+
+def showIndent(outfile, level, pretty_print=True):
+    if pretty_print:
+        for idx in range(level):
+            outfile.write('    ')
+
+
+def quote_xml(inStr):
+    "Escape markup chars, but do not modify CDATA sections."
+    if not inStr:
+        return ''
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s2 = ''
+    pos = 0
+    matchobjects = CDATA_pattern_.finditer(s1)
+    for mo in matchobjects:
+        s3 = s1[pos:mo.start()]
+        s2 += quote_xml_aux(s3)
+        s2 += s1[mo.start():mo.end()]
+        pos = mo.end()
+    s3 = s1[pos:]
+    s2 += quote_xml_aux(s3)
+    return s2
+
+
+def quote_xml_aux(inStr):
+    s1 = inStr.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    return s1
+
+
+def quote_attrib(inStr):
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s1 = s1.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    if '"' in s1:
+        if "'" in s1:
+            s1 = '"%s"' % s1.replace('"', "&quot;")
+        else:
+            s1 = "'%s'" % s1
+    else:
+        s1 = '"%s"' % s1
+    return s1
+
+
+def quote_python(inStr):
+    s1 = inStr
+    if s1.find("'") == -1:
+        if s1.find('\n') == -1:
+            return "'%s'" % s1
+        else:
+            return "'''%s'''" % s1
+    else:
+        if s1.find('"') != -1:
+            s1 = s1.replace('"', '\\"')
+        if s1.find('\n') == -1:
+            return '"%s"' % s1
+        else:
+            return '"""%s"""' % s1
+
+
+def get_all_text_(node):
+    if node.text is not None:
+        text = node.text
+    else:
+        text = ''
+    for child in node:
+        if child.tail is not None:
+            text += child.tail
+    return text
+
+
+def find_attr_value_(attr_name, node):
+    attrs = node.attrib
+    attr_parts = attr_name.split(':')
+    value = None
+    if len(attr_parts) == 1:
+        value = attrs.get(attr_name)
+    elif len(attr_parts) == 2:
+        prefix, name = attr_parts
+        namespace = node.nsmap.get(prefix)
+        if namespace is not None:
+            value = attrs.get('{%s}%s' % (namespace, name, ))
+    return value
+
+
+class GDSParseError(Exception):
+    pass
+
+
+def raise_parse_error(node, msg):
+    msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, )
+    raise GDSParseError(msg)
+
+
+class MixedContainer:
+    # Constants for category:
+    CategoryNone = 0
+    CategoryText = 1
+    CategorySimple = 2
+    CategoryComplex = 3
+    # Constants for content_type:
+    TypeNone = 0
+    TypeText = 1
+    TypeString = 2
+    TypeInteger = 3
+    TypeFloat = 4
+    TypeDecimal = 5
+    TypeDouble = 6
+    TypeBoolean = 7
+    TypeBase64 = 8
+    def __init__(self, category, content_type, name, value):
+        self.category = category
+        self.content_type = content_type
+        self.name = name
+        self.value = value
+    def getCategory(self):
+        return self.category
+    def getContenttype(self, content_type):
+        return self.content_type
+    def getValue(self):
+        return self.value
+    def getName(self):
+        return self.name
+    def export(self, outfile, level, name, namespace,
+               pretty_print=True):
+        if self.category ==
MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def set_data_type(self, data_type): 
self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + +# +# Data representation classes. +# + + +class PedidoCancelamentoNFe(GeneratedsSuper): + """Schema utilizado para PEDIDO de Cancelamento de NFS-e.Este Schema + XML é utilizado pelos Prestadores de serviços cancelarem NFS-e + emitidas por eles.""" + subclass = None + superclass = None + def __init__(self, Cabecalho=None, Detalhe=None, Signature=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.Cabecalho = Cabecalho + if Detalhe is None: + self.Detalhe = [] + else: + self.Detalhe = Detalhe + self.Signature = Signature + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, PedidoCancelamentoNFe) + if subclass is not None: + return subclass(*args_, **kwargs_) + if PedidoCancelamentoNFe.subclass: + return PedidoCancelamentoNFe.subclass(*args_, **kwargs_) + else: + return PedidoCancelamentoNFe(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): + if ( + self.Cabecalho is not None or + self.Detalhe or + self.Signature is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PedidoCancelamentoNFe', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('PedidoCancelamentoNFe') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PedidoCancelamentoNFe') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='PedidoCancelamentoNFe', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PedidoCancelamentoNFe'): + pass + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PedidoCancelamentoNFe', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Cabecalho is not None: + self.Cabecalho.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Cabecalho', pretty_print=pretty_print) + for Detalhe_ in self.Detalhe: + Detalhe_.export(outfile, level, namespaceprefix_, 
namespacedef_='', name_='Detalhe', pretty_print=pretty_print) + if self.Signature is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sSignature>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Signature), input_name='Signature')), namespaceprefix_ , eol_)) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + pass + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'Cabecalho': + obj_ = CabecalhoType.factory(parent_object_=self) + obj_.build(child_) + self.Cabecalho = obj_ + obj_.original_tagname_ = 'Cabecalho' + elif nodeName_ == 'Detalhe': + obj_ = DetalheType.factory(parent_object_=self) + obj_.build(child_) + self.Detalhe.append(obj_) + obj_.original_tagname_ = 'Detalhe' + elif nodeName_ == 'Signature': + Signature_ = child_.text + Signature_ = self.gds_validate_string(Signature_, node, 'Signature') + self.Signature = Signature_ +# end class PedidoCancelamentoNFe + + +class CabecalhoType(GeneratedsSuper): + """Cabeçalho do pedido.Informe a Versão do Schema XML utilizado.""" + subclass = None + superclass = None + def __init__(self, Versao='1', CPFCNPJRemetente=None, transacao=True, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.Versao = _cast(None, Versao) + self.CPFCNPJRemetente = CPFCNPJRemetente + self.transacao = transacao + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, CabecalhoType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if CabecalhoType.subclass: + return CabecalhoType.subclass(*args_, **kwargs_) + else: + return CabecalhoType(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): + if ( + self.CPFCNPJRemetente is not None or + not self.transacao + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CabecalhoType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('CabecalhoType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CabecalhoType') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='CabecalhoType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='CabecalhoType'): + if self.Versao != "1" and 'Versao' not in already_processed: + already_processed.add('Versao') + outfile.write(' Versao=%s' % (quote_attrib(self.Versao), )) + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CabecalhoType', fromsubclass_=False, 
pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.CPFCNPJRemetente is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sCPFCNPJRemetente>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CPFCNPJRemetente), input_name='CPFCNPJRemetente')), namespaceprefix_ , eol_)) + if self.transacao is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%stransacao>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.transacao, input_name='transacao'), namespaceprefix_ , eol_)) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('Versao', node) + if value is not None and 'Versao' not in already_processed: + already_processed.add('Versao') + self.Versao = value + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'CPFCNPJRemetente': + CPFCNPJRemetente_ = child_.text + CPFCNPJRemetente_ = self.gds_validate_string(CPFCNPJRemetente_, node, 'CPFCNPJRemetente') + self.CPFCNPJRemetente = CPFCNPJRemetente_ + elif nodeName_ == 'transacao': + sval_ = child_.text + if sval_ in ('true', '1'): + ival_ = True + elif sval_ in ('false', '0'): + ival_ = False + else: + raise_parse_error(child_, 'requires boolean') + ival_ = self.gds_validate_boolean(ival_, node, 'transacao') + self.transacao = ival_ +# end class CabecalhoType + + +class DetalheType(GeneratedsSuper): + """Detalhe do pedido de cancelamento de NFS-e. Cada detalhe deverá + conter a Chave de uma NFS-e e sua respectiva assinatura de + cancelamento.""" + subclass = None + superclass = None + def __init__(self, ChaveNFe=None, AssinaturaCancelamento=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ChaveNFe = ChaveNFe + self.AssinaturaCancelamento = AssinaturaCancelamento + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, DetalheType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if DetalheType.subclass: + return DetalheType.subclass(*args_, **kwargs_) + else: + return DetalheType(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): + if ( + self.ChaveNFe is not None or + self.AssinaturaCancelamento is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='DetalheType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('DetalheType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='DetalheType') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='DetalheType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % 
(namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='DetalheType'): + pass + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='DetalheType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.ChaveNFe is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sChaveNFe>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ChaveNFe), input_name='ChaveNFe')), namespaceprefix_ , eol_)) + if self.AssinaturaCancelamento is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sAssinaturaCancelamento>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.AssinaturaCancelamento), input_name='AssinaturaCancelamento')), namespaceprefix_ , eol_)) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + pass + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'ChaveNFe': + ChaveNFe_ = child_.text + ChaveNFe_ = self.gds_validate_string(ChaveNFe_, node, 'ChaveNFe') + self.ChaveNFe = ChaveNFe_ + elif nodeName_ == 'AssinaturaCancelamento': + AssinaturaCancelamento_ = child_.text + AssinaturaCancelamento_ = self.gds_validate_string(AssinaturaCancelamento_, node, 'AssinaturaCancelamento') + self.AssinaturaCancelamento = AssinaturaCancelamento_ +# end class DetalheType + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + rootClass = GDSClassesMapping.get(tag) + if rootClass is None: + rootClass = globals().get(tag) + return tag, rootClass + + +def parse(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PedidoCancelamentoNFe' + rootClass = PedidoCancelamentoNFe + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + doc = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='', + pretty_print=True) + return rootObj + + +def parseEtree(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PedidoCancelamentoNFe' + rootClass = PedidoCancelamentoNFe + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + doc = None + mapping = {} + rootElement = rootObj.to_etree(None, name_=rootTag, mapping_=mapping) + reverse_mapping = rootObj.gds_reverse_node_mapping(mapping) + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(content) + sys.stdout.write('\n') + return rootObj, rootElement, mapping, reverse_mapping + + +def parseString(inString, silence=False): + '''Parse a string, create the object tree, and export it. 
+ + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. + ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PedidoCancelamentoNFe' + rootClass = PedidoCancelamentoNFe + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + return rootObj + + +def parseLiteral(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PedidoCancelamentoNFe' + rootClass = PedidoCancelamentoNFe + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + doc = None + if not silence: + sys.stdout.write('#from PedidoCancelamentoNFe_v01 import *\n\n') + sys.stdout.write('import PedidoCancelamentoNFe_v01 as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + + +__all__ = [ + "CabecalhoType", + "DetalheType", + "PedidoCancelamentoNFe" +] diff --git a/nfselib/paulistana/v02/PedidoConsultaCNPJ_v01.py b/nfselib/paulistana/v02/PedidoConsultaCNPJ_v01.py new file mode 100644 index 0000000..1769c0b --- /dev/null +++ b/nfselib/paulistana/v02/PedidoConsultaCNPJ_v01.py @@ -0,0 +1,1058 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Jul 22 06:40:29 2020 by generateDS.py version 2.30.15. +# Python 3.8.3 (default, May 17 2020, 18:15:42) [GCC 10.1.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('--no-collect-includes', '') +# ('--use-getter-setter', 'none') +# ('-f', '') +# ('-o', './paulistanalib/v02/PedidoConsultaCNPJ_v01.py') +# +# Command line arguments: +# ./process_includes/PedidoConsultaCNPJ_v01.xsd +# +# Command line: +# /home/mileo/Projects/oca12/bin/erpbrasil-edoc-gen-generate-python --no-namespace-defs --no-collect-includes --use-getter-setter="none" -f -o "./paulistanalib/v02/PedidoConsultaCNPJ_v01.py" ./process_includes/PedidoConsultaCNPJ_v01.xsd +# +# Current working directory (os.getcwd()): +# schemas +# + +import os +import sys +import re as re_ +import base64 +import datetime as datetime_ +import warnings as warnings_ +try: + from lxml import etree as etree_ +except ImportError: + from xml.etree import ElementTree as etree_ + + +Validate_simpletypes_ = True +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. 
+ try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. +# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ImportError: + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ImportError: + GenerateDSNamespaceTypePrefixes_ = {} + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. 
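+#
+# Illustrative usage sketch (not part of the generateDS.py output): one way
+# the request classes defined further below in this module might be built
+# and serialized, assuming the nfselib package created by this patch is on
+# the import path. The CNPJ values are placeholders.
+#
+#     import sys
+#     from nfselib.paulistana.v02 import PedidoConsultaCNPJ_v01 as consulta
+#
+#     pedido = consulta.PedidoConsultaCNPJ(
+#         Cabecalho=consulta.CabecalhoType(
+#             Versao='1',
+#             CPFCNPJRemetente='99999999000191',  # placeholder CNPJ
+#         ),
+#         CNPJContribuinte='99999999000191',  # placeholder CNPJ
+#     )
+#     pedido.export(sys.stdout, 0, name_='PedidoConsultaCNPJ', pretty_print=True)
+#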
+ +try: + from generatedssuper import GeneratedsSuper +except ImportError as exp: + + class GeneratedsSuper(object): + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data) + def gds_validate_base64(self, input_data, node=None, input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % input_data + def gds_validate_integer(self, input_data, node=None, input_name=''): + return input_data + def gds_format_integer_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integers') + return values + def gds_format_float(self, input_data, input_name=''): + return ('%.15f' % input_data).rstrip('0') + def gds_validate_float(self, input_data, node=None, input_name=''): + return input_data + def gds_format_float_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of floats') + return values + def gds_format_double(self, input_data, input_name=''): + return '%e' % input_data + def gds_validate_double(self, input_data, node=None, input_name=''): + return input_data + def gds_format_double_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of doubles') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_validate_boolean(self, input_data, node=None, input_name=''): + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + if value not in ('true', '1', 'false', '0', ): + raise_parse_error( + node, + 'Requires sequence of booleans ' + '("true", "1", "false", "0")') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + 
input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + 
total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + return None + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + if type(self) != type(other): + return False + return self.__dict__ == other.__dict__ + def __ne__(self, other): + return not self.__eq__(other) + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. 
+#
+
+## from IPython.Shell import IPShellEmbed
+## args = ''
+## ipshell = IPShellEmbed(args,
+##     banner = 'Dropping into IPython',
+##     exit_msg = 'Leaving Interpreter, back to program.')
+
+# Then use the following line where and when you want to drop into the
+# IPython shell:
+#    ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit')
+
+#
+# Globals
+#
+
+ExternalEncoding = ''
+Tag_pattern_ = re_.compile(r'({.*})?(.*)')
+String_cleanup_pat_ = re_.compile(r"[\n\r\s]+")
+Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)')
+CDATA_pattern_ = re_.compile(r"<!\[CDATA\[.*?\]\]>", re_.DOTALL)
+
+# Change this to redirect the generated superclass module to use a
+# specific subclass module.
+CurrentSubclassModule_ = None
+
+#
+# Support/utility functions.
+#
+
+
+def showIndent(outfile, level, pretty_print=True):
+    if pretty_print:
+        for idx in range(level):
+            outfile.write('    ')
+
+
+def quote_xml(inStr):
+    "Escape markup chars, but do not modify CDATA sections."
+    if not inStr:
+        return ''
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s2 = ''
+    pos = 0
+    matchobjects = CDATA_pattern_.finditer(s1)
+    for mo in matchobjects:
+        s3 = s1[pos:mo.start()]
+        s2 += quote_xml_aux(s3)
+        s2 += s1[mo.start():mo.end()]
+        pos = mo.end()
+    s3 = s1[pos:]
+    s2 += quote_xml_aux(s3)
+    return s2
+
+
+def quote_xml_aux(inStr):
+    s1 = inStr.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    return s1
+
+
+def quote_attrib(inStr):
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s1 = s1.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    if '"' in s1:
+        if "'" in s1:
+            s1 = '"%s"' % s1.replace('"', "&quot;")
+        else:
+            s1 = "'%s'" % s1
+    else:
+        s1 = '"%s"' % s1
+    return s1
+
+
+def quote_python(inStr):
+    s1 = inStr
+    if s1.find("'") == -1:
+        if s1.find('\n') == -1:
+            return "'%s'" % s1
+        else:
+            return "'''%s'''" % s1
+    else:
+        if s1.find('"') != -1:
+            s1 = s1.replace('"', '\\"')
+        if s1.find('\n') == -1:
+            return '"%s"' % s1
+        else:
+            return '"""%s"""' % s1
+
+
+def get_all_text_(node):
+    if node.text is not None:
+        text = node.text
+    else:
+        text = ''
+    for child in node:
+        if child.tail is not None:
+            text += child.tail
+    return text
+
+
+def find_attr_value_(attr_name, node):
+    attrs = node.attrib
+    attr_parts = attr_name.split(':')
+    value = None
+    if len(attr_parts) == 1:
+        value = attrs.get(attr_name)
+    elif len(attr_parts) == 2:
+        prefix, name = attr_parts
+        namespace = node.nsmap.get(prefix)
+        if namespace is not None:
+            value = attrs.get('{%s}%s' % (namespace, name, ))
+    return value
+
+
+class GDSParseError(Exception):
+    pass
+
+
+def raise_parse_error(node, msg):
+    msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, )
+    raise GDSParseError(msg)
+
+
+class MixedContainer:
+    # Constants for category:
+    CategoryNone = 0
+    CategoryText = 1
+    CategorySimple = 2
+    CategoryComplex = 3
+    # Constants for content_type:
+    TypeNone = 0
+    TypeText = 1
+    TypeString = 2
+    TypeInteger = 3
+    TypeFloat = 4
+    TypeDecimal = 5
+    TypeDouble = 6
+    TypeBoolean = 7
+    TypeBase64 = 8
+    def __init__(self, category, content_type, name, value):
+        self.category = category
+        self.content_type = content_type
+        self.name = name
+        self.value = value
+    def getCategory(self):
+        return self.category
+    def getContenttype(self, content_type):
+        return self.content_type
+    def getValue(self):
+        return self.value
+    def getName(self):
+        return self.name
+    def export(self, outfile, level, name, namespace,
+               pretty_print=True):
+        if self.category ==
MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def set_data_type(self, data_type): 
self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + +# +# Data representation classes. +# + + +class PedidoConsultaCNPJ(GeneratedsSuper): + """Schema utilizado para PEDIDO de consultas de CNPJ.Este Schema XML é + utilizado pelos tomadores e/ou prestadores de serviços + consultarem quais Inscrições Municipais (CCM) estão vinculadas a + um determinado CNPJ e se estes CCM emitem NFS-e ou não.""" + subclass = None + superclass = None + def __init__(self, Cabecalho=None, CNPJContribuinte=None, Signature=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.Cabecalho = Cabecalho + self.CNPJContribuinte = CNPJContribuinte + self.Signature = Signature + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, PedidoConsultaCNPJ) + if subclass is not None: + return subclass(*args_, **kwargs_) + if PedidoConsultaCNPJ.subclass: + return PedidoConsultaCNPJ.subclass(*args_, **kwargs_) + else: + return PedidoConsultaCNPJ(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): + if ( + self.Cabecalho is not None or + self.CNPJContribuinte is not None or + self.Signature is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PedidoConsultaCNPJ', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('PedidoConsultaCNPJ') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PedidoConsultaCNPJ') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='PedidoConsultaCNPJ', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PedidoConsultaCNPJ'): + pass + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PedidoConsultaCNPJ', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Cabecalho is not None: + self.Cabecalho.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Cabecalho', pretty_print=pretty_print) + if self.CNPJContribuinte is 
not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sCNPJContribuinte>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CNPJContribuinte), input_name='CNPJContribuinte')), namespaceprefix_ , eol_)) + if self.Signature is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sSignature>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Signature), input_name='Signature')), namespaceprefix_ , eol_)) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + pass + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'Cabecalho': + obj_ = CabecalhoType.factory(parent_object_=self) + obj_.build(child_) + self.Cabecalho = obj_ + obj_.original_tagname_ = 'Cabecalho' + elif nodeName_ == 'CNPJContribuinte': + CNPJContribuinte_ = child_.text + CNPJContribuinte_ = self.gds_validate_string(CNPJContribuinte_, node, 'CNPJContribuinte') + self.CNPJContribuinte = CNPJContribuinte_ + elif nodeName_ == 'Signature': + Signature_ = child_.text + Signature_ = self.gds_validate_string(Signature_, node, 'Signature') + self.Signature = Signature_ +# end class PedidoConsultaCNPJ + + +class CabecalhoType(GeneratedsSuper): + """Cabeçalho do pedido.Informe a Versão do Schema XML utilizado.""" + subclass = None + superclass = None + def __init__(self, Versao='1', CPFCNPJRemetente=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.Versao = _cast(None, Versao) + self.CPFCNPJRemetente = CPFCNPJRemetente + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, CabecalhoType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if CabecalhoType.subclass: + return CabecalhoType.subclass(*args_, **kwargs_) + else: + return CabecalhoType(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): + if ( + self.CPFCNPJRemetente is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CabecalhoType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('CabecalhoType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CabecalhoType') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='CabecalhoType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='CabecalhoType'): + if self.Versao != "1" and 'Versao' not in already_processed: + already_processed.add('Versao') + outfile.write(' Versao=%s' 
% (quote_attrib(self.Versao), )) + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CabecalhoType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.CPFCNPJRemetente is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sCPFCNPJRemetente>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CPFCNPJRemetente), input_name='CPFCNPJRemetente')), namespaceprefix_ , eol_)) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('Versao', node) + if value is not None and 'Versao' not in already_processed: + already_processed.add('Versao') + self.Versao = value + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'CPFCNPJRemetente': + CPFCNPJRemetente_ = child_.text + CPFCNPJRemetente_ = self.gds_validate_string(CPFCNPJRemetente_, node, 'CPFCNPJRemetente') + self.CPFCNPJRemetente = CPFCNPJRemetente_ +# end class CabecalhoType + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + rootClass = GDSClassesMapping.get(tag) + if rootClass is None: + rootClass = globals().get(tag) + return tag, rootClass + + +def parse(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PedidoConsultaCNPJ' + rootClass = PedidoConsultaCNPJ + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + doc = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='', + pretty_print=True) + return rootObj + + +def parseEtree(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PedidoConsultaCNPJ' + rootClass = PedidoConsultaCNPJ + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + doc = None + mapping = {} + rootElement = rootObj.to_etree(None, name_=rootTag, mapping_=mapping) + reverse_mapping = rootObj.gds_reverse_node_mapping(mapping) + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(content) + sys.stdout.write('\n') + return rootObj, rootElement, mapping, reverse_mapping + + +def parseString(inString, silence=False): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. 
+ ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PedidoConsultaCNPJ' + rootClass = PedidoConsultaCNPJ + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + return rootObj + + +def parseLiteral(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PedidoConsultaCNPJ' + rootClass = PedidoConsultaCNPJ + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + doc = None + if not silence: + sys.stdout.write('#from PedidoConsultaCNPJ_v01 import *\n\n') + sys.stdout.write('import PedidoConsultaCNPJ_v01 as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + + +__all__ = [ + "CabecalhoType", + "PedidoConsultaCNPJ" +] diff --git a/nfselib/paulistana/v02/PedidoConsultaCPOM_v01.py b/nfselib/paulistana/v02/PedidoConsultaCPOM_v01.py new file mode 100644 index 0000000..eb3bd48 --- /dev/null +++ b/nfselib/paulistana/v02/PedidoConsultaCPOM_v01.py @@ -0,0 +1,1159 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Jul 22 06:40:30 2020 by generateDS.py version 2.30.15. +# Python 3.8.3 (default, May 17 2020, 18:15:42) [GCC 10.1.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('--no-collect-includes', '') +# ('--use-getter-setter', 'none') +# ('-f', '') +# ('-o', './paulistanalib/v02/PedidoConsultaCPOM_v01.py') +# +# Command line arguments: +# ./process_includes/PedidoConsultaCPOM_v01.xsd +# +# Command line: +# /home/mileo/Projects/oca12/bin/erpbrasil-edoc-gen-generate-python --no-namespace-defs --no-collect-includes --use-getter-setter="none" -f -o "./paulistanalib/v02/PedidoConsultaCPOM_v01.py" ./process_includes/PedidoConsultaCPOM_v01.xsd +# +# Current working directory (os.getcwd()): +# schemas +# + +import os +import sys +import re as re_ +import base64 +import datetime as datetime_ +import warnings as warnings_ +try: + from lxml import etree as etree_ +except ImportError: + from xml.etree import ElementTree as etree_ + + +Validate_simpletypes_ = True +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. 
+ try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. +# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ImportError: + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ImportError: + GenerateDSNamespaceTypePrefixes_ = {} + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. 
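+#
+# Illustrative sketch only (an assumption, not produced by generateDS.py):
+# a replacement module placed on the import path could start out as below.
+# A real replacement must supply every gds_* helper the generated classes
+# call; only one method is shown here as an example of an override.
+#
+#     # File: generatedssuper.py
+#
+#     class GeneratedsSuper(object):
+#         # ... remaining gds_* helpers required by the generated code ...
+#         def gds_format_string(self, input_data, input_name=''):
+#             # Example override: trim stray whitespace before export.
+#             return input_data.strip()
+#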
+ +try: + from generatedssuper import GeneratedsSuper +except ImportError as exp: + + class GeneratedsSuper(object): + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data) + def gds_validate_base64(self, input_data, node=None, input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % input_data + def gds_validate_integer(self, input_data, node=None, input_name=''): + return input_data + def gds_format_integer_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integers') + return values + def gds_format_float(self, input_data, input_name=''): + return ('%.15f' % input_data).rstrip('0') + def gds_validate_float(self, input_data, node=None, input_name=''): + return input_data + def gds_format_float_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of floats') + return values + def gds_format_double(self, input_data, input_name=''): + return '%e' % input_data + def gds_validate_double(self, input_data, node=None, input_name=''): + return input_data + def gds_format_double_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of doubles') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_validate_boolean(self, input_data, node=None, input_name=''): + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + if value not in ('true', '1', 'false', '0', ): + raise_parse_error( + node, + 'Requires sequence of booleans ' + '("true", "1", "false", "0")') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + 
input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + 
total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + return None + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + if type(self) != type(other): + return False + return self.__dict__ == other.__dict__ + def __ne__(self, other): + return not self.__eq__(other) + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. 
+#
+
+## from IPython.Shell import IPShellEmbed
+## args = ''
+## ipshell = IPShellEmbed(args,
+##     banner = 'Dropping into IPython',
+##     exit_msg = 'Leaving Interpreter, back to program.')
+
+# Then use the following line where and when you want to drop into the
+# IPython shell:
+#    ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit')
+
+#
+# Globals
+#
+
+ExternalEncoding = ''
+Tag_pattern_ = re_.compile(r'({.*})?(.*)')
+String_cleanup_pat_ = re_.compile(r"[\n\r\s]+")
+Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)')
+CDATA_pattern_ = re_.compile(r"<!\[CDATA\[.*?\]\]>", re_.DOTALL)
+
+# Change this to redirect the generated superclass module to use a
+# specific subclass module.
+CurrentSubclassModule_ = None
+
+#
+# Support/utility functions.
+#
+
+
+def showIndent(outfile, level, pretty_print=True):
+    if pretty_print:
+        for idx in range(level):
+            outfile.write('    ')
+
+
+def quote_xml(inStr):
+    "Escape markup chars, but do not modify CDATA sections."
+    if not inStr:
+        return ''
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s2 = ''
+    pos = 0
+    matchobjects = CDATA_pattern_.finditer(s1)
+    for mo in matchobjects:
+        s3 = s1[pos:mo.start()]
+        s2 += quote_xml_aux(s3)
+        s2 += s1[mo.start():mo.end()]
+        pos = mo.end()
+    s3 = s1[pos:]
+    s2 += quote_xml_aux(s3)
+    return s2
+
+
+def quote_xml_aux(inStr):
+    s1 = inStr.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    return s1
+
+
+def quote_attrib(inStr):
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s1 = s1.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    if '"' in s1:
+        if "'" in s1:
+            s1 = '"%s"' % s1.replace('"', "&quot;")
+        else:
+            s1 = "'%s'" % s1
+    else:
+        s1 = '"%s"' % s1
+    return s1
+
+
+def quote_python(inStr):
+    s1 = inStr
+    if s1.find("'") == -1:
+        if s1.find('\n') == -1:
+            return "'%s'" % s1
+        else:
+            return "'''%s'''" % s1
+    else:
+        if s1.find('"') != -1:
+            s1 = s1.replace('"', '\\"')
+        if s1.find('\n') == -1:
+            return '"%s"' % s1
+        else:
+            return '"""%s"""' % s1
+
+
+def get_all_text_(node):
+    if node.text is not None:
+        text = node.text
+    else:
+        text = ''
+    for child in node:
+        if child.tail is not None:
+            text += child.tail
+    return text
+
+
+def find_attr_value_(attr_name, node):
+    attrs = node.attrib
+    attr_parts = attr_name.split(':')
+    value = None
+    if len(attr_parts) == 1:
+        value = attrs.get(attr_name)
+    elif len(attr_parts) == 2:
+        prefix, name = attr_parts
+        namespace = node.nsmap.get(prefix)
+        if namespace is not None:
+            value = attrs.get('{%s}%s' % (namespace, name, ))
+    return value
+
+
+class GDSParseError(Exception):
+    pass
+
+
+def raise_parse_error(node, msg):
+    msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, )
+    raise GDSParseError(msg)
+
+
+class MixedContainer:
+    # Constants for category:
+    CategoryNone = 0
+    CategoryText = 1
+    CategorySimple = 2
+    CategoryComplex = 3
+    # Constants for content_type:
+    TypeNone = 0
+    TypeText = 1
+    TypeString = 2
+    TypeInteger = 3
+    TypeFloat = 4
+    TypeDecimal = 5
+    TypeDouble = 6
+    TypeBoolean = 7
+    TypeBase64 = 8
+    def __init__(self, category, content_type, name, value):
+        self.category = category
+        self.content_type = content_type
+        self.name = name
+        self.value = value
+    def getCategory(self):
+        return self.category
+    def getContenttype(self, content_type):
+        return self.content_type
+    def getValue(self):
+        return self.value
+    def getName(self):
+        return self.name
+    def export(self, outfile, level, name, namespace,
+               pretty_print=True):
+        if self.category ==
MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def set_data_type(self, data_type): 
self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + +# +# Data representation classes. +# + + +class tpOpcaoSimples(object): + _0='0' + _1='1' + _2='2' + _3='3' + + +class tpStatusNFe(object): + N='N' + C='C' + E='E' + + +class tpTipoRPS(object): + RPS='RPS' + RPSM='RPS-M' + RPSC='RPS-C' + + +class PedidoConsultaCPOM(GeneratedsSuper): + """Schema utilizado para PEDIDO de consultas ao CPOM.Este Schema XML é + utilizado para os tomadores/intermediários consultaremse um CNPJ + de um prestador de serviços tem inscrição no CPOM.""" + subclass = None + superclass = None + def __init__(self, Cabecalho=None, DetalheCPOM=None, Signature=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.Cabecalho = Cabecalho + self.DetalheCPOM = DetalheCPOM + self.Signature = Signature + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, PedidoConsultaCPOM) + if subclass is not None: + return subclass(*args_, **kwargs_) + if PedidoConsultaCPOM.subclass: + return PedidoConsultaCPOM.subclass(*args_, **kwargs_) + else: + return PedidoConsultaCPOM(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): + if ( + self.Cabecalho is not None or + self.DetalheCPOM is not None or + self.Signature is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PedidoConsultaCPOM', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('PedidoConsultaCPOM') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PedidoConsultaCPOM') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='PedidoConsultaCPOM', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PedidoConsultaCPOM'): + pass + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PedidoConsultaCPOM', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Cabecalho is not None: + self.Cabecalho.export(outfile, 
level, namespaceprefix_, namespacedef_='', name_='Cabecalho', pretty_print=pretty_print) + if self.DetalheCPOM is not None: + self.DetalheCPOM.export(outfile, level, namespaceprefix_, namespacedef_='', name_='DetalheCPOM', pretty_print=pretty_print) + if self.Signature is not None: + self.Signature.export(outfile, level, namespaceprefix_='ds:', namespacedef_='', name_='Signature', pretty_print=pretty_print) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + pass + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'Cabecalho': + obj_ = CabecalhoType.factory(parent_object_=self) + obj_.build(child_) + self.Cabecalho = obj_ + obj_.original_tagname_ = 'Cabecalho' + elif nodeName_ == 'DetalheCPOM': + obj_ = DetalheCPOMType.factory(parent_object_=self) + obj_.build(child_) + self.DetalheCPOM = obj_ + obj_.original_tagname_ = 'DetalheCPOM' + elif nodeName_ == 'Signature': + obj_ = SignatureType.factory(parent_object_=self) + obj_.build(child_) + self.Signature = obj_ + obj_.original_tagname_ = 'Signature' +# end class PedidoConsultaCPOM + + +class CabecalhoType(GeneratedsSuper): + """Cabeçalho do pedido de consulta ao CPOM.Informe a Versão do Schema + XML utilizado.""" + subclass = None + superclass = None + def __init__(self, Versao='1', id=None, Remetente=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.Versao = _cast(None, Versao) + self.id = _cast(None, id) + self.Remetente = Remetente + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, CabecalhoType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if CabecalhoType.subclass: + return CabecalhoType.subclass(*args_, **kwargs_) + else: + return CabecalhoType(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): + if ( + self.Remetente is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CabecalhoType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('CabecalhoType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CabecalhoType') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='CabecalhoType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='CabecalhoType'): + if self.Versao != 1 and 'Versao' not in already_processed: + already_processed.add('Versao') + outfile.write(' Versao=%s' % (quote_attrib(self.Versao), )) + if self.id is not None and 'id' not in 
already_processed: + already_processed.add('id') + outfile.write(' id=%s' % (quote_attrib(self.id), )) + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CabecalhoType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Remetente is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sRemetente>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Remetente), input_name='Remetente')), namespaceprefix_ , eol_)) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('Versao', node) + if value is not None and 'Versao' not in already_processed: + already_processed.add('Versao') + self.Versao = value + value = find_attr_value_('id', node) + if value is not None and 'id' not in already_processed: + already_processed.add('id') + self.id = value + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'Remetente': + Remetente_ = child_.text + Remetente_ = self.gds_validate_string(Remetente_, node, 'Remetente') + self.Remetente = Remetente_ +# end class CabecalhoType + + +class DetalheCPOMType(GeneratedsSuper): + subclass = None + superclass = None + def __init__(self, CPFCNPJPrestador=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.CPFCNPJPrestador = CPFCNPJPrestador + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, DetalheCPOMType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if DetalheCPOMType.subclass: + return DetalheCPOMType.subclass(*args_, **kwargs_) + else: + return DetalheCPOMType(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): + if ( + self.CPFCNPJPrestador is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='DetalheCPOMType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('DetalheCPOMType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='DetalheCPOMType') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='DetalheCPOMType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='DetalheCPOMType'): + pass + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='DetalheCPOMType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.CPFCNPJPrestador is not None: + 
self.CPFCNPJPrestador.export(outfile, level, namespaceprefix_, namespacedef_='', name_='CPFCNPJPrestador', pretty_print=pretty_print) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + pass + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'CPFCNPJPrestador': + obj_ = tpCPFCNPJ.factory(parent_object_=self) + obj_.build(child_) + self.CPFCNPJPrestador = obj_ + obj_.original_tagname_ = 'CPFCNPJPrestador' +# end class DetalheCPOMType + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + rootClass = GDSClassesMapping.get(tag) + if rootClass is None: + rootClass = globals().get(tag) + return tag, rootClass + + +def parse(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PedidoConsultaCPOM' + rootClass = PedidoConsultaCPOM + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + doc = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='', + pretty_print=True) + return rootObj + + +def parseEtree(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PedidoConsultaCPOM' + rootClass = PedidoConsultaCPOM + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + doc = None + mapping = {} + rootElement = rootObj.to_etree(None, name_=rootTag, mapping_=mapping) + reverse_mapping = rootObj.gds_reverse_node_mapping(mapping) + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(content) + sys.stdout.write('\n') + return rootObj, rootElement, mapping, reverse_mapping + + +def parseString(inString, silence=False): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. + ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PedidoConsultaCPOM' + rootClass = PedidoConsultaCPOM + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + return rootObj + + +def parseLiteral(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PedidoConsultaCPOM' + rootClass = PedidoConsultaCPOM + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. 
+ doc = None + if not silence: + sys.stdout.write('#from PedidoConsultaCPOM_v01 import *\n\n') + sys.stdout.write('import PedidoConsultaCPOM_v01 as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + + +__all__ = [ + "CabecalhoType", + "DetalheCPOMType", + "PedidoConsultaCPOM" +] diff --git a/nfselib/paulistana/v02/PedidoConsultaEmissaoNFSE_v01.py b/nfselib/paulistana/v02/PedidoConsultaEmissaoNFSE_v01.py new file mode 100644 index 0000000..2f986f9 --- /dev/null +++ b/nfselib/paulistana/v02/PedidoConsultaEmissaoNFSE_v01.py @@ -0,0 +1,1160 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Jul 22 06:40:30 2020 by generateDS.py version 2.30.15. +# Python 3.8.3 (default, May 17 2020, 18:15:42) [GCC 10.1.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('--no-collect-includes', '') +# ('--use-getter-setter', 'none') +# ('-f', '') +# ('-o', './paulistanalib/v02/PedidoConsultaEmissaoNFSE_v01.py') +# +# Command line arguments: +# ./process_includes/PedidoConsultaEmissaoNFSE_v01.xsd +# +# Command line: +# /home/mileo/Projects/oca12/bin/erpbrasil-edoc-gen-generate-python --no-namespace-defs --no-collect-includes --use-getter-setter="none" -f -o "./paulistanalib/v02/PedidoConsultaEmissaoNFSE_v01.py" ./process_includes/PedidoConsultaEmissaoNFSE_v01.xsd +# +# Current working directory (os.getcwd()): +# schemas +# + +import os +import sys +import re as re_ +import base64 +import datetime as datetime_ +import warnings as warnings_ +try: + from lxml import etree as etree_ +except ImportError: + from xml.etree import ElementTree as etree_ + + +Validate_simpletypes_ = True +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. 
+# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. +# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ImportError: + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ImportError: + GenerateDSNamespaceTypePrefixes_ = {} + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ImportError as exp: + + class GeneratedsSuper(object): + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data) + def gds_validate_base64(self, input_data, node=None, input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % input_data + def gds_validate_integer(self, input_data, node=None, input_name=''): + return input_data + def gds_format_integer_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integers') + return values + def gds_format_float(self, input_data, input_name=''): + return ('%.15f' % input_data).rstrip('0') + def gds_validate_float(self, input_data, node=None, input_name=''): + return input_data + def gds_format_float_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of floats') + return values + def gds_format_double(self, input_data, input_name=''): + return '%e' % input_data + def gds_validate_double(self, input_data, node=None, input_name=''): + return input_data + def gds_format_double_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def 
gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of doubles') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_validate_boolean(self, input_data, node=None, input_name=''): + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + if value not in ('true', '1', 'false', '0', ): + raise_parse_error( + node, + 'Requires sequence of booleans ' + '("true", "1", "false", "0")') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' 
+ time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. 
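+            # Illustrative example (assumed values, not taken from the XSD): with
+            #     patterns = [['[0-9]{11}', '[0-9]{14}']]
+            # the target passes only if it is exactly 11 or 14 digits long
+            # (e.g. a CPF or CNPJ), since every inner list must contain at
+            # least one pattern that matches the whole target.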
+ found1 = True + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + return None + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + if type(self) != type(other): + return False + return self.__dict__ == other.__dict__ + def __ne__(self, other): + return not self.__eq__(other) + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. 
+# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. +# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(' ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." + if not inStr: + return '' + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s2 = '' + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos:mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start():mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + return s1 + + +def quote_attrib(inStr): + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s1 = s1.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', """) + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find('\n') == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find('\n') == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = '' + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(':') + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get('{%s}%s' % (namespace, name, )) + return value + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + def getCategory(self): + return self.category + def getContenttype(self, content_type): + return self.content_type + def getValue(self): + return self.value + def getName(self): + return self.name + def export(self, outfile, level, name, namespace, + pretty_print=True): + if self.category == 
MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def set_data_type(self, data_type): 
self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + +# +# Data representation classes. +# + + +class tpOpcaoSimples(object): + _0='0' + _1='1' + _2='2' + _3='3' + + +class tpStatusNFe(object): + N='N' + C='C' + E='E' + + +class tpTipoRPS(object): + RPS='RPS' + RPSM='RPS-M' + RPSC='RPS-C' + + +class PedidoConsultaEmissaoNFSE(GeneratedsSuper): + """Schema utilizado para PEDIDO de consultas a autorização de emissão + da NFSe.Este Schema XML é utilizado para os + tomadores/intermediários consultaremse um CNPJ de um prestador + de serviços possui autorização para emissão de NFSe.""" + subclass = None + superclass = None + def __init__(self, Cabecalho=None, DetalheEmissNFSE=None, Signature=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.Cabecalho = Cabecalho + self.DetalheEmissNFSE = DetalheEmissNFSE + self.Signature = Signature + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, PedidoConsultaEmissaoNFSE) + if subclass is not None: + return subclass(*args_, **kwargs_) + if PedidoConsultaEmissaoNFSE.subclass: + return PedidoConsultaEmissaoNFSE.subclass(*args_, **kwargs_) + else: + return PedidoConsultaEmissaoNFSE(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): + if ( + self.Cabecalho is not None or + self.DetalheEmissNFSE is not None or + self.Signature is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PedidoConsultaEmissaoNFSE', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('PedidoConsultaEmissaoNFSE') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PedidoConsultaEmissaoNFSE') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='PedidoConsultaEmissaoNFSE', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PedidoConsultaEmissaoNFSE'): + pass + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PedidoConsultaEmissaoNFSE', fromsubclass_=False, 
pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Cabecalho is not None: + self.Cabecalho.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Cabecalho', pretty_print=pretty_print) + if self.DetalheEmissNFSE is not None: + self.DetalheEmissNFSE.export(outfile, level, namespaceprefix_, namespacedef_='', name_='DetalheEmissNFSE', pretty_print=pretty_print) + if self.Signature is not None: + self.Signature.export(outfile, level, namespaceprefix_='ds:', namespacedef_='', name_='Signature', pretty_print=pretty_print) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + pass + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'Cabecalho': + obj_ = CabecalhoType.factory(parent_object_=self) + obj_.build(child_) + self.Cabecalho = obj_ + obj_.original_tagname_ = 'Cabecalho' + elif nodeName_ == 'DetalheEmissNFSE': + obj_ = DetalheEmissNFSEType.factory(parent_object_=self) + obj_.build(child_) + self.DetalheEmissNFSE = obj_ + obj_.original_tagname_ = 'DetalheEmissNFSE' + elif nodeName_ == 'Signature': + obj_ = SignatureType.factory(parent_object_=self) + obj_.build(child_) + self.Signature = obj_ + obj_.original_tagname_ = 'Signature' +# end class PedidoConsultaEmissaoNFSE + + +class CabecalhoType(GeneratedsSuper): + """Cabeçalho do pedido de consulta a autorização a emissão da + NFSe.Informe a Versão do Schema XML utilizado.""" + subclass = None + superclass = None + def __init__(self, Versao='1', id=None, Remetente=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.Versao = _cast(None, Versao) + self.id = _cast(None, id) + self.Remetente = Remetente + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, CabecalhoType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if CabecalhoType.subclass: + return CabecalhoType.subclass(*args_, **kwargs_) + else: + return CabecalhoType(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): + if ( + self.Remetente is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CabecalhoType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('CabecalhoType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CabecalhoType') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='CabecalhoType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='CabecalhoType'): + if 
self.Versao != 1 and 'Versao' not in already_processed: + already_processed.add('Versao') + outfile.write(' Versao=%s' % (quote_attrib(self.Versao), )) + if self.id is not None and 'id' not in already_processed: + already_processed.add('id') + outfile.write(' id=%s' % (quote_attrib(self.id), )) + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CabecalhoType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Remetente is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sRemetente>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Remetente), input_name='Remetente')), namespaceprefix_ , eol_)) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('Versao', node) + if value is not None and 'Versao' not in already_processed: + already_processed.add('Versao') + self.Versao = value + value = find_attr_value_('id', node) + if value is not None and 'id' not in already_processed: + already_processed.add('id') + self.id = value + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'Remetente': + Remetente_ = child_.text + Remetente_ = self.gds_validate_string(Remetente_, node, 'Remetente') + self.Remetente = Remetente_ +# end class CabecalhoType + + +class DetalheEmissNFSEType(GeneratedsSuper): + subclass = None + superclass = None + def __init__(self, CPFCNPJPrestador=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.CPFCNPJPrestador = CPFCNPJPrestador + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, DetalheEmissNFSEType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if DetalheEmissNFSEType.subclass: + return DetalheEmissNFSEType.subclass(*args_, **kwargs_) + else: + return DetalheEmissNFSEType(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): + if ( + self.CPFCNPJPrestador is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='DetalheEmissNFSEType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('DetalheEmissNFSEType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='DetalheEmissNFSEType') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='DetalheEmissNFSEType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='DetalheEmissNFSEType'): + pass + def 
exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='DetalheEmissNFSEType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.CPFCNPJPrestador is not None: + self.CPFCNPJPrestador.export(outfile, level, namespaceprefix_, namespacedef_='', name_='CPFCNPJPrestador', pretty_print=pretty_print) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + pass + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'CPFCNPJPrestador': + obj_ = tpCPFCNPJ.factory(parent_object_=self) + obj_.build(child_) + self.CPFCNPJPrestador = obj_ + obj_.original_tagname_ = 'CPFCNPJPrestador' +# end class DetalheEmissNFSEType + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + rootClass = GDSClassesMapping.get(tag) + if rootClass is None: + rootClass = globals().get(tag) + return tag, rootClass + + +def parse(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PedidoConsultaEmissaoNFSE' + rootClass = PedidoConsultaEmissaoNFSE + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + doc = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='', + pretty_print=True) + return rootObj + + +def parseEtree(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PedidoConsultaEmissaoNFSE' + rootClass = PedidoConsultaEmissaoNFSE + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + doc = None + mapping = {} + rootElement = rootObj.to_etree(None, name_=rootTag, mapping_=mapping) + reverse_mapping = rootObj.gds_reverse_node_mapping(mapping) + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(content) + sys.stdout.write('\n') + return rootObj, rootElement, mapping, reverse_mapping + + +def parseString(inString, silence=False): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. + ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PedidoConsultaEmissaoNFSE' + rootClass = PedidoConsultaEmissaoNFSE + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. 
+ if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + return rootObj + + +def parseLiteral(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PedidoConsultaEmissaoNFSE' + rootClass = PedidoConsultaEmissaoNFSE + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + doc = None + if not silence: + sys.stdout.write('#from PedidoConsultaEmissaoNFSE_v01 import *\n\n') + sys.stdout.write('import PedidoConsultaEmissaoNFSE_v01 as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + + +__all__ = [ + "CabecalhoType", + "DetalheEmissNFSEType", + "PedidoConsultaEmissaoNFSE" +] diff --git a/nfselib/paulistana/v02/PedidoConsultaInformacoesLoteNFTS_v01.py b/nfselib/paulistana/v02/PedidoConsultaInformacoesLoteNFTS_v01.py new file mode 100644 index 0000000..d8e21e6 --- /dev/null +++ b/nfselib/paulistana/v02/PedidoConsultaInformacoesLoteNFTS_v01.py @@ -0,0 +1,1160 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Jul 22 06:40:30 2020 by generateDS.py version 2.30.15. +# Python 3.8.3 (default, May 17 2020, 18:15:42) [GCC 10.1.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('--no-collect-includes', '') +# ('--use-getter-setter', 'none') +# ('-f', '') +# ('-o', './paulistanalib/v02/PedidoConsultaInformacoesLoteNFTS_v01.py') +# +# Command line arguments: +# ./process_includes/PedidoConsultaInformacoesLoteNFTS_v01.xsd +# +# Command line: +# /home/mileo/Projects/oca12/bin/erpbrasil-edoc-gen-generate-python --no-namespace-defs --no-collect-includes --use-getter-setter="none" -f -o "./paulistanalib/v02/PedidoConsultaInformacoesLoteNFTS_v01.py" ./process_includes/PedidoConsultaInformacoesLoteNFTS_v01.xsd +# +# Current working directory (os.getcwd()): +# schemas +# + +import os +import sys +import re as re_ +import base64 +import datetime as datetime_ +import warnings as warnings_ +try: + from lxml import etree as etree_ +except ImportError: + from xml.etree import ElementTree as etree_ + + +Validate_simpletypes_ = True +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. 
+ try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. +# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ImportError: + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ImportError: + GenerateDSNamespaceTypePrefixes_ = {} + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. 
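+#
+# A minimal illustrative sketch of that hook (the method bodies shown are
+# assumptions, not generated output): if a module named generatedssuper.py is
+# importable, its GeneratedsSuper class is used instead of the fallback
+# defined below, so it has to supply every gds_format_* / gds_validate_*
+# helper that the generated element classes call.
+#
+# # File: generatedssuper.py
+#
+# class GeneratedsSuper(object):
+#     def gds_format_string(self, input_data, input_name=''):
+#         # project-specific tweak: collapse runs of whitespace before export
+#         return ' '.join(str(input_data).split())
+#     def gds_validate_string(self, input_data, node=None, input_name=''):
+#         return input_data or ''
+#     # ... plus the remaining gds_* helpers used by the fallback class below.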
+ +try: + from generatedssuper import GeneratedsSuper +except ImportError as exp: + + class GeneratedsSuper(object): + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data) + def gds_validate_base64(self, input_data, node=None, input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % input_data + def gds_validate_integer(self, input_data, node=None, input_name=''): + return input_data + def gds_format_integer_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integers') + return values + def gds_format_float(self, input_data, input_name=''): + return ('%.15f' % input_data).rstrip('0') + def gds_validate_float(self, input_data, node=None, input_name=''): + return input_data + def gds_format_float_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of floats') + return values + def gds_format_double(self, input_data, input_name=''): + return '%e' % input_data + def gds_validate_double(self, input_data, node=None, input_name=''): + return input_data + def gds_format_double_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of doubles') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_validate_boolean(self, input_data, node=None, input_name=''): + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + if value not in ('true', '1', 'false', '0', ): + raise_parse_error( + node, + 'Requires sequence of booleans ' + '("true", "1", "false", "0")') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + 
input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + 
total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + return None + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + if type(self) != type(other): + return False + return self.__dict__ == other.__dict__ + def __ne__(self, other): + return not self.__eq__(other) + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. 
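A minimal sketch of the timezone-aware date/datetime helpers defined in the GeneratedsSuper fallback above, assuming the module is importable as nfselib.paulistana.v02.PedidoConsultaInformacoesLoteNFTS_v01 once the patch is applied; the timestamp values are arbitrary:

from nfselib.paulistana.v02.PedidoConsultaInformacoesLoteNFTS_v01 import (
    PedidoConsultaInformacoesLoteNFTS,
)

pedido = PedidoConsultaInformacoesLoteNFTS()

# gds_parse_datetime accepts ISO-8601 values ending in 'Z' or '+/-HH:MM' and
# returns an aware datetime; gds_format_datetime writes the offset back out.
dt = pedido.gds_parse_datetime('2020-07-22T06:40:30.500000-03:00')
print(pedido.gds_format_datetime(dt))  # 2020-07-22T06:40:30.500000-03:00

# Plain dates round-trip through datetime.date objects.
d = pedido.gds_parse_date('2020-07-22')
print(pedido.gds_format_date(d))       # 2020-07-22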
+# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. +# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(' ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." + if not inStr: + return '' + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s2 = '' + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos:mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start():mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + return s1 + + +def quote_attrib(inStr): + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s1 = s1.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', """) + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find('\n') == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find('\n') == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = '' + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(':') + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get('{%s}%s' % (namespace, name, )) + return value + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + def getCategory(self): + return self.category + def getContenttype(self, content_type): + return self.content_type + def getValue(self): + return self.value + def getName(self): + return self.name + def export(self, outfile, level, name, namespace, + pretty_print=True): + if self.category == 
MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def set_data_type(self, data_type): 
self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + +# +# Data representation classes. +# + + +class tpOpcaoSimples(object): + _0='0' + _1='1' + _2='2' + _3='3' + + +class tpStatusNFe(object): + N='N' + C='C' + E='E' + + +class tpTipoRPS(object): + RPS='RPS' + RPSM='RPS-M' + RPSC='RPS-C' + + +class PedidoConsultaInformacoesLoteNFTS(GeneratedsSuper): + """Schema utilizado para PEDIDO de consultas de informações do Lote de + NFTS.Este Schema XML é utilizado para os + tomadores/intermediários de serviços consultarem as informações + do lote de NFTS.""" + subclass = None + superclass = None + def __init__(self, Cabecalho=None, DetalheInformacoesLote=None, Signature=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.Cabecalho = Cabecalho + self.DetalheInformacoesLote = DetalheInformacoesLote + self.Signature = Signature + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, PedidoConsultaInformacoesLoteNFTS) + if subclass is not None: + return subclass(*args_, **kwargs_) + if PedidoConsultaInformacoesLoteNFTS.subclass: + return PedidoConsultaInformacoesLoteNFTS.subclass(*args_, **kwargs_) + else: + return PedidoConsultaInformacoesLoteNFTS(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): + if ( + self.Cabecalho is not None or + self.DetalheInformacoesLote is not None or + self.Signature is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PedidoConsultaInformacoesLoteNFTS', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('PedidoConsultaInformacoesLoteNFTS') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PedidoConsultaInformacoesLoteNFTS') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='PedidoConsultaInformacoesLoteNFTS', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PedidoConsultaInformacoesLoteNFTS'): + pass + def exportChildren(self, outfile, level, namespaceprefix_='', 
namespacedef_='', name_='PedidoConsultaInformacoesLoteNFTS', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Cabecalho is not None: + self.Cabecalho.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Cabecalho', pretty_print=pretty_print) + if self.DetalheInformacoesLote is not None: + self.DetalheInformacoesLote.export(outfile, level, namespaceprefix_, namespacedef_='', name_='DetalheInformacoesLote', pretty_print=pretty_print) + if self.Signature is not None: + self.Signature.export(outfile, level, namespaceprefix_='ds:', namespacedef_='', name_='Signature', pretty_print=pretty_print) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + pass + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'Cabecalho': + obj_ = CabecalhoType.factory(parent_object_=self) + obj_.build(child_) + self.Cabecalho = obj_ + obj_.original_tagname_ = 'Cabecalho' + elif nodeName_ == 'DetalheInformacoesLote': + obj_ = DetalheInformacoesLoteType.factory(parent_object_=self) + obj_.build(child_) + self.DetalheInformacoesLote = obj_ + obj_.original_tagname_ = 'DetalheInformacoesLote' + elif nodeName_ == 'Signature': + obj_ = SignatureType.factory(parent_object_=self) + obj_.build(child_) + self.Signature = obj_ + obj_.original_tagname_ = 'Signature' +# end class PedidoConsultaInformacoesLoteNFTS + + +class CabecalhoType(GeneratedsSuper): + """Cabeçalho do pedido de consulta de informações do lote de + NFTS.Informe a Versão do Schema XML utilizado.""" + subclass = None + superclass = None + def __init__(self, Versao='1', id=None, Remetente=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.Versao = _cast(None, Versao) + self.id = _cast(None, id) + self.Remetente = Remetente + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, CabecalhoType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if CabecalhoType.subclass: + return CabecalhoType.subclass(*args_, **kwargs_) + else: + return CabecalhoType(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): + if ( + self.Remetente is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CabecalhoType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('CabecalhoType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CabecalhoType') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='CabecalhoType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + 
outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='CabecalhoType'): + if self.Versao != 1 and 'Versao' not in already_processed: + already_processed.add('Versao') + outfile.write(' Versao=%s' % (quote_attrib(self.Versao), )) + if self.id is not None and 'id' not in already_processed: + already_processed.add('id') + outfile.write(' id=%s' % (quote_attrib(self.id), )) + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CabecalhoType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Remetente is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sRemetente>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Remetente), input_name='Remetente')), namespaceprefix_ , eol_)) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('Versao', node) + if value is not None and 'Versao' not in already_processed: + already_processed.add('Versao') + self.Versao = value + value = find_attr_value_('id', node) + if value is not None and 'id' not in already_processed: + already_processed.add('id') + self.id = value + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'Remetente': + Remetente_ = child_.text + Remetente_ = self.gds_validate_string(Remetente_, node, 'Remetente') + self.Remetente = Remetente_ +# end class CabecalhoType + + +class DetalheInformacoesLoteType(GeneratedsSuper): + subclass = None + superclass = None + def __init__(self, NumeroLote=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.NumeroLote = NumeroLote + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, DetalheInformacoesLoteType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if DetalheInformacoesLoteType.subclass: + return DetalheInformacoesLoteType.subclass(*args_, **kwargs_) + else: + return DetalheInformacoesLoteType(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): + if ( + self.NumeroLote is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='DetalheInformacoesLoteType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('DetalheInformacoesLoteType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='DetalheInformacoesLoteType') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='DetalheInformacoesLoteType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, 
eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='DetalheInformacoesLoteType'): + pass + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='DetalheInformacoesLoteType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.NumeroLote is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sNumeroLote>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.NumeroLote), input_name='NumeroLote')), namespaceprefix_ , eol_)) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + pass + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'NumeroLote': + NumeroLote_ = child_.text + NumeroLote_ = self.gds_validate_string(NumeroLote_, node, 'NumeroLote') + self.NumeroLote = NumeroLote_ +# end class DetalheInformacoesLoteType + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + rootClass = GDSClassesMapping.get(tag) + if rootClass is None: + rootClass = globals().get(tag) + return tag, rootClass + + +def parse(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PedidoConsultaInformacoesLoteNFTS' + rootClass = PedidoConsultaInformacoesLoteNFTS + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + doc = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='', + pretty_print=True) + return rootObj + + +def parseEtree(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PedidoConsultaInformacoesLoteNFTS' + rootClass = PedidoConsultaInformacoesLoteNFTS + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + doc = None + mapping = {} + rootElement = rootObj.to_etree(None, name_=rootTag, mapping_=mapping) + reverse_mapping = rootObj.gds_reverse_node_mapping(mapping) + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(content) + sys.stdout.write('\n') + return rootObj, rootElement, mapping, reverse_mapping + + +def parseString(inString, silence=False): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. 
+ ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PedidoConsultaInformacoesLoteNFTS' + rootClass = PedidoConsultaInformacoesLoteNFTS + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + return rootObj + + +def parseLiteral(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PedidoConsultaInformacoesLoteNFTS' + rootClass = PedidoConsultaInformacoesLoteNFTS + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + doc = None + if not silence: + sys.stdout.write('#from PedidoConsultaInformacoesLoteNFTS_v01 import *\n\n') + sys.stdout.write('import PedidoConsultaInformacoesLoteNFTS_v01 as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + + +__all__ = [ + "CabecalhoType", + "DetalheInformacoesLoteType", + "PedidoConsultaInformacoesLoteNFTS" +] diff --git a/nfselib/paulistana/v02/PedidoConsultaLoteNFTS_v01.py b/nfselib/paulistana/v02/PedidoConsultaLoteNFTS_v01.py new file mode 100644 index 0000000..5046475 --- /dev/null +++ b/nfselib/paulistana/v02/PedidoConsultaLoteNFTS_v01.py @@ -0,0 +1,1160 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Jul 22 06:40:30 2020 by generateDS.py version 2.30.15. +# Python 3.8.3 (default, May 17 2020, 18:15:42) [GCC 10.1.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('--no-collect-includes', '') +# ('--use-getter-setter', 'none') +# ('-f', '') +# ('-o', './paulistanalib/v02/PedidoConsultaLoteNFTS_v01.py') +# +# Command line arguments: +# ./process_includes/PedidoConsultaLoteNFTS_v01.xsd +# +# Command line: +# /home/mileo/Projects/oca12/bin/erpbrasil-edoc-gen-generate-python --no-namespace-defs --no-collect-includes --use-getter-setter="none" -f -o "./paulistanalib/v02/PedidoConsultaLoteNFTS_v01.py" ./process_includes/PedidoConsultaLoteNFTS_v01.xsd +# +# Current working directory (os.getcwd()): +# schemas +# + +import os +import sys +import re as re_ +import base64 +import datetime as datetime_ +import warnings as warnings_ +try: + from lxml import etree as etree_ +except ImportError: + from xml.etree import ElementTree as etree_ + + +Validate_simpletypes_ = True +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. 
+ try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. +# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ImportError: + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ImportError: + GenerateDSNamespaceTypePrefixes_ = {} + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. 
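A minimal usage sketch for the request type and parse helpers defined further down in this file, assuming the package layout added by the patch is importable; the CNPJ and lot number in the sample XML are made-up values:

import sys

from nfselib.paulistana.v02 import PedidoConsultaLoteNFTS_v01 as model

SAMPLE = (
    '<PedidoConsultaLoteNFTS>'
    '<Cabecalho Versao="1"><Remetente>12345678000195</Remetente></Cabecalho>'
    '<DetalheLoteNFTS><NumeroLote>42</NumeroLote></DetalheLoteNFTS>'
    '</PedidoConsultaLoteNFTS>'
)

# parseString builds the object tree; silence=True suppresses the echo to stdout.
pedido = model.parseString(SAMPLE, silence=True)
print(pedido.Cabecalho.Remetente)         # 12345678000195
print(pedido.DetalheLoteNFTS.NumeroLote)  # 42

# export() serializes the tree back to XML on any file-like object.
pedido.export(sys.stdout, 0, name_='PedidoConsultaLoteNFTS')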
+ +try: + from generatedssuper import GeneratedsSuper +except ImportError as exp: + + class GeneratedsSuper(object): + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data) + def gds_validate_base64(self, input_data, node=None, input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % input_data + def gds_validate_integer(self, input_data, node=None, input_name=''): + return input_data + def gds_format_integer_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integers') + return values + def gds_format_float(self, input_data, input_name=''): + return ('%.15f' % input_data).rstrip('0') + def gds_validate_float(self, input_data, node=None, input_name=''): + return input_data + def gds_format_float_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of floats') + return values + def gds_format_double(self, input_data, input_name=''): + return '%e' % input_data + def gds_validate_double(self, input_data, node=None, input_name=''): + return input_data + def gds_format_double_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of doubles') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_validate_boolean(self, input_data, node=None, input_name=''): + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + if value not in ('true', '1', 'false', '0', ): + raise_parse_error( + node, + 'Requires sequence of booleans ' + '("true", "1", "false", "0")') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + 
input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + 
total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + return None + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + if type(self) != type(other): + return False + return self.__dict__ == other.__dict__ + def __ne__(self, other): + return not self.__eq__(other) + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. 
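A minimal sketch of the subclass hook used by the generated factory() methods: getSubclassFromModule_ (defined just above) looks up a class named <ClassName>Sub in CurrentSubclassModule_, so application code can extend these bindings without editing the generated file. The helper method and import path are illustrative:

import sys

from nfselib.paulistana.v02 import PedidoConsultaLoteNFTS_v01 as model


class CabecalhoTypeSub(model.CabecalhoType):
    def remetente_digits(self):
        # Application-level convenience, not part of the schema.
        return ''.join(ch for ch in (self.Remetente or '') if ch.isdigit())


# Redirect the factories to the module that defines the *Sub classes,
# here the current module itself.
model.CurrentSubclassModule_ = sys.modules[__name__]

cabecalho = model.CabecalhoType.factory(Remetente='12.345.678/0001-95')
print(type(cabecalho).__name__)      # CabecalhoTypeSub
print(cabecalho.remetente_digits())  # 12345678000195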
+# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. +# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(' ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." + if not inStr: + return '' + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s2 = '' + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos:mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start():mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + return s1 + + +def quote_attrib(inStr): + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s1 = s1.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', """) + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find('\n') == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find('\n') == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = '' + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(':') + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get('{%s}%s' % (namespace, name, )) + return value + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + def getCategory(self): + return self.category + def getContenttype(self, content_type): + return self.content_type + def getValue(self): + return self.value + def getName(self): + return self.name + def export(self, outfile, level, name, namespace, + pretty_print=True): + if self.category == 
MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def set_data_type(self, data_type): 
self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + +# +# Data representation classes. +# + + +class tpOpcaoSimples(object): + _0='0' + _1='1' + _2='2' + _3='3' + + +class tpStatusNFe(object): + N='N' + C='C' + E='E' + + +class tpTipoRPS(object): + RPS='RPS' + RPSM='RPS-M' + RPSC='RPS-C' + + +class PedidoConsultaLoteNFTS(GeneratedsSuper): + """Schema utilizado para PEDIDO de consultas de Lote de NFTS.Este + Schema XML é utilizado para os tomadores/intermediários de + serviços consultarem as NFTS geradas a partir de um lote de + NFTS.""" + subclass = None + superclass = None + def __init__(self, Cabecalho=None, DetalheLoteNFTS=None, Signature=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.Cabecalho = Cabecalho + self.DetalheLoteNFTS = DetalheLoteNFTS + self.Signature = Signature + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, PedidoConsultaLoteNFTS) + if subclass is not None: + return subclass(*args_, **kwargs_) + if PedidoConsultaLoteNFTS.subclass: + return PedidoConsultaLoteNFTS.subclass(*args_, **kwargs_) + else: + return PedidoConsultaLoteNFTS(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): + if ( + self.Cabecalho is not None or + self.DetalheLoteNFTS is not None or + self.Signature is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PedidoConsultaLoteNFTS', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('PedidoConsultaLoteNFTS') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PedidoConsultaLoteNFTS') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='PedidoConsultaLoteNFTS', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PedidoConsultaLoteNFTS'): + pass + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PedidoConsultaLoteNFTS', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + 
if self.Cabecalho is not None: + self.Cabecalho.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Cabecalho', pretty_print=pretty_print) + if self.DetalheLoteNFTS is not None: + self.DetalheLoteNFTS.export(outfile, level, namespaceprefix_, namespacedef_='', name_='DetalheLoteNFTS', pretty_print=pretty_print) + if self.Signature is not None: + self.Signature.export(outfile, level, namespaceprefix_='ds:', namespacedef_='', name_='Signature', pretty_print=pretty_print) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + pass + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'Cabecalho': + obj_ = CabecalhoType.factory(parent_object_=self) + obj_.build(child_) + self.Cabecalho = obj_ + obj_.original_tagname_ = 'Cabecalho' + elif nodeName_ == 'DetalheLoteNFTS': + obj_ = DetalheLoteNFTSType.factory(parent_object_=self) + obj_.build(child_) + self.DetalheLoteNFTS = obj_ + obj_.original_tagname_ = 'DetalheLoteNFTS' + elif nodeName_ == 'Signature': + obj_ = SignatureType.factory(parent_object_=self) + obj_.build(child_) + self.Signature = obj_ + obj_.original_tagname_ = 'Signature' +# end class PedidoConsultaLoteNFTS + + +class CabecalhoType(GeneratedsSuper): + """Cabeçalho do pedido de consulta de lote de NFTS.Informe a Versão do + Schema XML utilizado.""" + subclass = None + superclass = None + def __init__(self, Versao='1', id=None, Remetente=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.Versao = _cast(None, Versao) + self.id = _cast(None, id) + self.Remetente = Remetente + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, CabecalhoType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if CabecalhoType.subclass: + return CabecalhoType.subclass(*args_, **kwargs_) + else: + return CabecalhoType(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): + if ( + self.Remetente is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CabecalhoType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('CabecalhoType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CabecalhoType') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='CabecalhoType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='CabecalhoType'): + if self.Versao != 1 and 'Versao' not in already_processed: + already_processed.add('Versao') + 
outfile.write(' Versao=%s' % (quote_attrib(self.Versao), )) + if self.id is not None and 'id' not in already_processed: + already_processed.add('id') + outfile.write(' id=%s' % (quote_attrib(self.id), )) + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CabecalhoType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Remetente is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sRemetente>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Remetente), input_name='Remetente')), namespaceprefix_ , eol_)) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('Versao', node) + if value is not None and 'Versao' not in already_processed: + already_processed.add('Versao') + self.Versao = value + value = find_attr_value_('id', node) + if value is not None and 'id' not in already_processed: + already_processed.add('id') + self.id = value + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'Remetente': + Remetente_ = child_.text + Remetente_ = self.gds_validate_string(Remetente_, node, 'Remetente') + self.Remetente = Remetente_ +# end class CabecalhoType + + +class DetalheLoteNFTSType(GeneratedsSuper): + subclass = None + superclass = None + def __init__(self, NumeroLote=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.NumeroLote = NumeroLote + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, DetalheLoteNFTSType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if DetalheLoteNFTSType.subclass: + return DetalheLoteNFTSType.subclass(*args_, **kwargs_) + else: + return DetalheLoteNFTSType(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): + if ( + self.NumeroLote is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='DetalheLoteNFTSType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('DetalheLoteNFTSType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='DetalheLoteNFTSType') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='DetalheLoteNFTSType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='DetalheLoteNFTSType'): + pass + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='DetalheLoteNFTSType', fromsubclass_=False, 
pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.NumeroLote is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sNumeroLote>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.NumeroLote), input_name='NumeroLote')), namespaceprefix_ , eol_)) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + pass + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'NumeroLote': + NumeroLote_ = child_.text + NumeroLote_ = self.gds_validate_string(NumeroLote_, node, 'NumeroLote') + self.NumeroLote = NumeroLote_ +# end class DetalheLoteNFTSType + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + rootClass = GDSClassesMapping.get(tag) + if rootClass is None: + rootClass = globals().get(tag) + return tag, rootClass + + +def parse(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PedidoConsultaLoteNFTS' + rootClass = PedidoConsultaLoteNFTS + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + doc = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='', + pretty_print=True) + return rootObj + + +def parseEtree(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PedidoConsultaLoteNFTS' + rootClass = PedidoConsultaLoteNFTS + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + doc = None + mapping = {} + rootElement = rootObj.to_etree(None, name_=rootTag, mapping_=mapping) + reverse_mapping = rootObj.gds_reverse_node_mapping(mapping) + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(content) + sys.stdout.write('\n') + return rootObj, rootElement, mapping, reverse_mapping + + +def parseString(inString, silence=False): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. + ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PedidoConsultaLoteNFTS' + rootClass = PedidoConsultaLoteNFTS + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. 
+ if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + return rootObj + + +def parseLiteral(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PedidoConsultaLoteNFTS' + rootClass = PedidoConsultaLoteNFTS + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + doc = None + if not silence: + sys.stdout.write('#from PedidoConsultaLoteNFTS_v01 import *\n\n') + sys.stdout.write('import PedidoConsultaLoteNFTS_v01 as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + + +__all__ = [ + "CabecalhoType", + "DetalheLoteNFTSType", + "PedidoConsultaLoteNFTS" +] diff --git a/nfselib/paulistana/v02/PedidoConsultaLote_v01.py b/nfselib/paulistana/v02/PedidoConsultaLote_v01.py new file mode 100644 index 0000000..94ec081 --- /dev/null +++ b/nfselib/paulistana/v02/PedidoConsultaLote_v01.py @@ -0,0 +1,1057 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Jul 22 06:40:29 2020 by generateDS.py version 2.30.15. +# Python 3.8.3 (default, May 17 2020, 18:15:42) [GCC 10.1.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('--no-collect-includes', '') +# ('--use-getter-setter', 'none') +# ('-f', '') +# ('-o', './paulistanalib/v02/PedidoConsultaLote_v01.py') +# +# Command line arguments: +# ./process_includes/PedidoConsultaLote_v01.xsd +# +# Command line: +# /home/mileo/Projects/oca12/bin/erpbrasil-edoc-gen-generate-python --no-namespace-defs --no-collect-includes --use-getter-setter="none" -f -o "./paulistanalib/v02/PedidoConsultaLote_v01.py" ./process_includes/PedidoConsultaLote_v01.xsd +# +# Current working directory (os.getcwd()): +# schemas +# + +import os +import sys +import re as re_ +import base64 +import datetime as datetime_ +import warnings as warnings_ +try: + from lxml import etree as etree_ +except ImportError: + from xml.etree import ElementTree as etree_ + + +Validate_simpletypes_ = True +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. 
This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. +# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ImportError: + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ImportError: + GenerateDSNamespaceTypePrefixes_ = {} + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ImportError as exp: + + class GeneratedsSuper(object): + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data) + def gds_validate_base64(self, input_data, node=None, input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % input_data + def gds_validate_integer(self, input_data, node=None, input_name=''): + return input_data + def gds_format_integer_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integers') + return values + def gds_format_float(self, input_data, input_name=''): + return ('%.15f' % input_data).rstrip('0') + def gds_validate_float(self, input_data, node=None, input_name=''): + return input_data + def gds_format_float_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except 
(TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of floats') + return values + def gds_format_double(self, input_data, input_name=''): + return '%e' % input_data + def gds_validate_double(self, input_data, node=None, input_name=''): + return input_data + def gds_format_double_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of doubles') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_validate_boolean(self, input_data, node=None, input_name=''): + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + if value not in ('true', '1', 'false', '0', ): + raise_parse_error( + node, + 'Requires sequence of booleans ' + '("true", "1", "false", "0")') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' 
+ time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. 
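+            # Illustrative annotation (added here, not emitted by generateDS):
+            # for a hypothetical patterns = [['[0-9]{11}', '[0-9]{14}']] and
+            # target = '12345678000195', the 14-digit target fully matches the
+            # second alternative, so the method returns True; each inner list
+            # must contain at least one full-length match for success.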
+ found1 = True + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + return None + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + if type(self) != type(other): + return False + return self.__dict__ == other.__dict__ + def __ne__(self, other): + return not self.__eq__(other) + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. 
+#
+
+## from IPython.Shell import IPShellEmbed
+## args = ''
+## ipshell = IPShellEmbed(args,
+##     banner = 'Dropping into IPython',
+##     exit_msg = 'Leaving Interpreter, back to program.')
+
+# Then use the following line where and when you want to drop into the
+# IPython shell:
+#    ipshell('<some message> -- Entering ipshell.\nHit Ctrl-D to exit')
+
+#
+# Globals
+#
+
+ExternalEncoding = ''
+Tag_pattern_ = re_.compile(r'({.*})?(.*)')
+String_cleanup_pat_ = re_.compile(r"[\n\r\s]+")
+Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)')
+CDATA_pattern_ = re_.compile(r"<!\[CDATA\[.*?\]\]>", re_.DOTALL)
+
+# Change this to redirect the generated superclass module to use a
+# specific subclass module.
+CurrentSubclassModule_ = None
+
+#
+# Support/utility functions.
+#
+
+
+def showIndent(outfile, level, pretty_print=True):
+    if pretty_print:
+        for idx in range(level):
+            outfile.write('    ')
+
+
+def quote_xml(inStr):
+    "Escape markup chars, but do not modify CDATA sections."
+    if not inStr:
+        return ''
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s2 = ''
+    pos = 0
+    matchobjects = CDATA_pattern_.finditer(s1)
+    for mo in matchobjects:
+        s3 = s1[pos:mo.start()]
+        s2 += quote_xml_aux(s3)
+        s2 += s1[mo.start():mo.end()]
+        pos = mo.end()
+    s3 = s1[pos:]
+    s2 += quote_xml_aux(s3)
+    return s2
+
+
+def quote_xml_aux(inStr):
+    s1 = inStr.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    return s1
+
+
+def quote_attrib(inStr):
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s1 = s1.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    if '"' in s1:
+        if "'" in s1:
+            s1 = '"%s"' % s1.replace('"', "&quot;")
+        else:
+            s1 = "'%s'" % s1
+    else:
+        s1 = '"%s"' % s1
+    return s1
+
+
+def quote_python(inStr):
+    s1 = inStr
+    if s1.find("'") == -1:
+        if s1.find('\n') == -1:
+            return "'%s'" % s1
+        else:
+            return "'''%s'''" % s1
+    else:
+        if s1.find('"') != -1:
+            s1 = s1.replace('"', '\\"')
+        if s1.find('\n') == -1:
+            return '"%s"' % s1
+        else:
+            return '"""%s"""' % s1
+
+
+def get_all_text_(node):
+    if node.text is not None:
+        text = node.text
+    else:
+        text = ''
+    for child in node:
+        if child.tail is not None:
+            text += child.tail
+    return text
+
+
+def find_attr_value_(attr_name, node):
+    attrs = node.attrib
+    attr_parts = attr_name.split(':')
+    value = None
+    if len(attr_parts) == 1:
+        value = attrs.get(attr_name)
+    elif len(attr_parts) == 2:
+        prefix, name = attr_parts
+        namespace = node.nsmap.get(prefix)
+        if namespace is not None:
+            value = attrs.get('{%s}%s' % (namespace, name, ))
+    return value
+
+
+class GDSParseError(Exception):
+    pass
+
+
+def raise_parse_error(node, msg):
+    msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, )
+    raise GDSParseError(msg)
+
+
+class MixedContainer:
+    # Constants for category:
+    CategoryNone = 0
+    CategoryText = 1
+    CategorySimple = 2
+    CategoryComplex = 3
+    # Constants for content_type:
+    TypeNone = 0
+    TypeText = 1
+    TypeString = 2
+    TypeInteger = 3
+    TypeFloat = 4
+    TypeDecimal = 5
+    TypeDouble = 6
+    TypeBoolean = 7
+    TypeBase64 = 8
+    def __init__(self, category, content_type, name, value):
+        self.category = category
+        self.content_type = content_type
+        self.name = name
+        self.value = value
+    def getCategory(self):
+        return self.category
+    def getContenttype(self, content_type):
+        return self.content_type
+    def getValue(self):
+        return self.value
+    def getName(self):
+        return self.name
+    def export(self, outfile, level, name, namespace,
+               pretty_print=True):
+        if self.category == MixedContainer.CategoryText:
+            # Prevent exporting empty content as empty lines.
+            if self.value.strip():
+                outfile.write(self.value)
+        elif self.category == MixedContainer.CategorySimple:
+            self.exportSimple(outfile, level, name)
+        else:  # category == MixedContainer.CategoryComplex
+            self.value.export(
+                outfile, level, namespace, name_=name,
+                pretty_print=pretty_print)
+    def exportSimple(self, outfile, level, name):
+        if self.content_type == MixedContainer.TypeString:
+            outfile.write('<%s>%s</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeInteger or \
+                self.content_type == MixedContainer.TypeBoolean:
+            outfile.write('<%s>%d</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeFloat or \
+                self.content_type == MixedContainer.TypeDecimal:
+            outfile.write('<%s>%f</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeDouble:
+            outfile.write('<%s>%g</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeBase64:
+            outfile.write('<%s>%s</%s>' % (
+                self.name,
+                base64.b64encode(self.value),
+                self.name))
+    def to_etree(self, element):
+        if self.category == MixedContainer.CategoryText:
+            # Prevent exporting empty content as empty lines.
+            if self.value.strip():
+                if len(element) > 0:
+                    if element[-1].tail is None:
+                        element[-1].tail = self.value
+                    else:
+                        element[-1].tail += self.value
+                else:
+                    if element.text is None:
+                        element.text = self.value
+                    else:
+                        element.text += self.value
+        elif self.category == MixedContainer.CategorySimple:
+            subelement = etree_.SubElement(
+                element, '%s' % self.name)
+            subelement.text = self.to_etree_simple()
+        else:  # category == MixedContainer.CategoryComplex
+            self.value.to_etree(element)
+    def to_etree_simple(self):
+        if self.content_type == MixedContainer.TypeString:
+            text = self.value
+        elif (self.content_type == MixedContainer.TypeInteger or
+                self.content_type == MixedContainer.TypeBoolean):
+            text = '%d' % self.value
+        elif (self.content_type == MixedContainer.TypeFloat or
+                self.content_type == MixedContainer.TypeDecimal):
+            text = '%f' % self.value
+        elif self.content_type == MixedContainer.TypeDouble:
+            text = '%g' % self.value
+        elif self.content_type == MixedContainer.TypeBase64:
+            text = '%s' % base64.b64encode(self.value)
+        return text
+    def exportLiteral(self, outfile, level, name):
+        if self.category == MixedContainer.CategoryText:
+            showIndent(outfile, level)
+            outfile.write(
+                'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % (
+                    self.category, self.content_type,
+                    self.name, self.value))
+        elif self.category == MixedContainer.CategorySimple:
+            showIndent(outfile, level)
+            outfile.write(
+                'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % (
+                    self.category, self.content_type,
+                    self.name, self.value))
+        else:  # category == MixedContainer.CategoryComplex
+            showIndent(outfile, level)
+            outfile.write(
+                'model_.MixedContainer(%d, %d, "%s",\n' % (
+                    self.category, self.content_type, self.name,))
+            self.value.exportLiteral(outfile, level + 1)
+            showIndent(outfile, level)
+            outfile.write(')\n')
+
+
+class MemberSpec_(object):
+    def __init__(self, name='', data_type='', container=0,
+                 optional=0, child_attrs=None, choice=None):
+        self.name = name
+        self.data_type = data_type
+        self.container = container
+        self.child_attrs = child_attrs
+        self.choice = choice
+        self.optional = optional
+    def set_name(self, name): self.name = name
+    def get_name(self): return self.name
+    def set_data_type(self, data_type):
self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + +# +# Data representation classes. +# + + +class PedidoConsultaLote(GeneratedsSuper): + """Schema utilizado para PEDIDO de consultas de Lote.Este Schema XML é + utilizado pelos prestadores de serviços consultarem as NFS-e + geradas a partir de um lote de RPS.""" + subclass = None + superclass = None + def __init__(self, Cabecalho=None, Signature=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.Cabecalho = Cabecalho + self.Signature = Signature + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, PedidoConsultaLote) + if subclass is not None: + return subclass(*args_, **kwargs_) + if PedidoConsultaLote.subclass: + return PedidoConsultaLote.subclass(*args_, **kwargs_) + else: + return PedidoConsultaLote(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): + if ( + self.Cabecalho is not None or + self.Signature is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PedidoConsultaLote', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('PedidoConsultaLote') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PedidoConsultaLote') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='PedidoConsultaLote', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PedidoConsultaLote'): + pass + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PedidoConsultaLote', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Cabecalho is not None: + self.Cabecalho.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Cabecalho', pretty_print=pretty_print) + if self.Signature is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sSignature>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Signature), 
input_name='Signature')), namespaceprefix_ , eol_)) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + pass + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'Cabecalho': + obj_ = CabecalhoType.factory(parent_object_=self) + obj_.build(child_) + self.Cabecalho = obj_ + obj_.original_tagname_ = 'Cabecalho' + elif nodeName_ == 'Signature': + Signature_ = child_.text + Signature_ = self.gds_validate_string(Signature_, node, 'Signature') + self.Signature = Signature_ +# end class PedidoConsultaLote + + +class CabecalhoType(GeneratedsSuper): + """Cabeçalho do pedido.Informe a Versão do Schema XML utilizado.""" + subclass = None + superclass = None + def __init__(self, Versao='1', CPFCNPJRemetente=None, NumeroLote=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.Versao = _cast(None, Versao) + self.CPFCNPJRemetente = CPFCNPJRemetente + self.NumeroLote = NumeroLote + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, CabecalhoType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if CabecalhoType.subclass: + return CabecalhoType.subclass(*args_, **kwargs_) + else: + return CabecalhoType(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): + if ( + self.CPFCNPJRemetente is not None or + self.NumeroLote is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CabecalhoType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('CabecalhoType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CabecalhoType') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='CabecalhoType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='CabecalhoType'): + if self.Versao != "1" and 'Versao' not in already_processed: + already_processed.add('Versao') + outfile.write(' Versao=%s' % (quote_attrib(self.Versao), )) + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CabecalhoType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.CPFCNPJRemetente is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sCPFCNPJRemetente>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CPFCNPJRemetente), input_name='CPFCNPJRemetente')), namespaceprefix_ , eol_)) + if self.NumeroLote is not None: + showIndent(outfile, level, 
pretty_print) + outfile.write('<%sNumeroLote>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.NumeroLote), input_name='NumeroLote')), namespaceprefix_ , eol_)) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('Versao', node) + if value is not None and 'Versao' not in already_processed: + already_processed.add('Versao') + self.Versao = value + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'CPFCNPJRemetente': + CPFCNPJRemetente_ = child_.text + CPFCNPJRemetente_ = self.gds_validate_string(CPFCNPJRemetente_, node, 'CPFCNPJRemetente') + self.CPFCNPJRemetente = CPFCNPJRemetente_ + elif nodeName_ == 'NumeroLote': + NumeroLote_ = child_.text + NumeroLote_ = self.gds_validate_string(NumeroLote_, node, 'NumeroLote') + self.NumeroLote = NumeroLote_ +# end class CabecalhoType + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + rootClass = GDSClassesMapping.get(tag) + if rootClass is None: + rootClass = globals().get(tag) + return tag, rootClass + + +def parse(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PedidoConsultaLote' + rootClass = PedidoConsultaLote + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + doc = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='', + pretty_print=True) + return rootObj + + +def parseEtree(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PedidoConsultaLote' + rootClass = PedidoConsultaLote + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + doc = None + mapping = {} + rootElement = rootObj.to_etree(None, name_=rootTag, mapping_=mapping) + reverse_mapping = rootObj.gds_reverse_node_mapping(mapping) + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(content) + sys.stdout.write('\n') + return rootObj, rootElement, mapping, reverse_mapping + + +def parseString(inString, silence=False): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. + ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PedidoConsultaLote' + rootClass = PedidoConsultaLote + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. 
+ if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + return rootObj + + +def parseLiteral(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PedidoConsultaLote' + rootClass = PedidoConsultaLote + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + doc = None + if not silence: + sys.stdout.write('#from PedidoConsultaLote_v01 import *\n\n') + sys.stdout.write('import PedidoConsultaLote_v01 as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + + +__all__ = [ + "CabecalhoType", + "PedidoConsultaLote" +] diff --git a/nfselib/paulistana/v02/PedidoConsultaNFTS_v01.py b/nfselib/paulistana/v02/PedidoConsultaNFTS_v01.py new file mode 100644 index 0000000..704f626 --- /dev/null +++ b/nfselib/paulistana/v02/PedidoConsultaNFTS_v01.py @@ -0,0 +1,1163 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Jul 22 06:40:30 2020 by generateDS.py version 2.30.15. +# Python 3.8.3 (default, May 17 2020, 18:15:42) [GCC 10.1.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('--no-collect-includes', '') +# ('--use-getter-setter', 'none') +# ('-f', '') +# ('-o', './paulistanalib/v02/PedidoConsultaNFTS_v01.py') +# +# Command line arguments: +# ./process_includes/PedidoConsultaNFTS_v01.xsd +# +# Command line: +# /home/mileo/Projects/oca12/bin/erpbrasil-edoc-gen-generate-python --no-namespace-defs --no-collect-includes --use-getter-setter="none" -f -o "./paulistanalib/v02/PedidoConsultaNFTS_v01.py" ./process_includes/PedidoConsultaNFTS_v01.xsd +# +# Current working directory (os.getcwd()): +# schemas +# + +import os +import sys +import re as re_ +import base64 +import datetime as datetime_ +import warnings as warnings_ +try: + from lxml import etree as etree_ +except ImportError: + from xml.etree import ElementTree as etree_ + + +Validate_simpletypes_ = True +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. 
This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. +# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ImportError: + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ImportError: + GenerateDSNamespaceTypePrefixes_ = {} + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ImportError as exp: + + class GeneratedsSuper(object): + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data) + def gds_validate_base64(self, input_data, node=None, input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % input_data + def gds_validate_integer(self, input_data, node=None, input_name=''): + return input_data + def gds_format_integer_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integers') + return values + def gds_format_float(self, input_data, input_name=''): + return ('%.15f' % input_data).rstrip('0') + def gds_validate_float(self, input_data, node=None, input_name=''): + return input_data + def gds_format_float_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except 
(TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of floats') + return values + def gds_format_double(self, input_data, input_name=''): + return '%e' % input_data + def gds_validate_double(self, input_data, node=None, input_name=''): + return input_data + def gds_format_double_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of doubles') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_validate_boolean(self, input_data, node=None, input_name=''): + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + if value not in ('true', '1', 'false', '0', ): + raise_parse_error( + node, + 'Requires sequence of booleans ' + '("true", "1", "false", "0")') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' 
+ time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. 
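+            # Illustrative annotation (added here, not emitted by generateDS):
+            # for a hypothetical patterns = [['[0-9]{11}', '[0-9]{14}']] and
+            # target = '12345678000195', the 14-digit target fully matches the
+            # second alternative, so the method returns True; each inner list
+            # must contain at least one full-length match for success.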
+ found1 = True + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + return None + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + if type(self) != type(other): + return False + return self.__dict__ == other.__dict__ + def __ne__(self, other): + return not self.__eq__(other) + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. 
+#
+
+## from IPython.Shell import IPShellEmbed
+## args = ''
+## ipshell = IPShellEmbed(args,
+##     banner = 'Dropping into IPython',
+##     exit_msg = 'Leaving Interpreter, back to program.')
+
+# Then use the following line where and when you want to drop into the
+# IPython shell:
+#    ipshell('<some message> -- Entering ipshell.\nHit Ctrl-D to exit')
+
+#
+# Globals
+#
+
+ExternalEncoding = ''
+Tag_pattern_ = re_.compile(r'({.*})?(.*)')
+String_cleanup_pat_ = re_.compile(r"[\n\r\s]+")
+Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)')
+CDATA_pattern_ = re_.compile(r"<!\[CDATA\[.*?\]\]>", re_.DOTALL)
+
+# Change this to redirect the generated superclass module to use a
+# specific subclass module.
+CurrentSubclassModule_ = None
+
+#
+# Support/utility functions.
+#
+
+
+def showIndent(outfile, level, pretty_print=True):
+    if pretty_print:
+        for idx in range(level):
+            outfile.write('    ')
+
+
+def quote_xml(inStr):
+    "Escape markup chars, but do not modify CDATA sections."
+    if not inStr:
+        return ''
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s2 = ''
+    pos = 0
+    matchobjects = CDATA_pattern_.finditer(s1)
+    for mo in matchobjects:
+        s3 = s1[pos:mo.start()]
+        s2 += quote_xml_aux(s3)
+        s2 += s1[mo.start():mo.end()]
+        pos = mo.end()
+    s3 = s1[pos:]
+    s2 += quote_xml_aux(s3)
+    return s2
+
+
+def quote_xml_aux(inStr):
+    s1 = inStr.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    return s1
+
+
+def quote_attrib(inStr):
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s1 = s1.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    if '"' in s1:
+        if "'" in s1:
+            s1 = '"%s"' % s1.replace('"', "&quot;")
+        else:
+            s1 = "'%s'" % s1
+    else:
+        s1 = '"%s"' % s1
+    return s1
+
+
+def quote_python(inStr):
+    s1 = inStr
+    if s1.find("'") == -1:
+        if s1.find('\n') == -1:
+            return "'%s'" % s1
+        else:
+            return "'''%s'''" % s1
+    else:
+        if s1.find('"') != -1:
+            s1 = s1.replace('"', '\\"')
+        if s1.find('\n') == -1:
+            return '"%s"' % s1
+        else:
+            return '"""%s"""' % s1
+
+
+def get_all_text_(node):
+    if node.text is not None:
+        text = node.text
+    else:
+        text = ''
+    for child in node:
+        if child.tail is not None:
+            text += child.tail
+    return text
+
+
+def find_attr_value_(attr_name, node):
+    attrs = node.attrib
+    attr_parts = attr_name.split(':')
+    value = None
+    if len(attr_parts) == 1:
+        value = attrs.get(attr_name)
+    elif len(attr_parts) == 2:
+        prefix, name = attr_parts
+        namespace = node.nsmap.get(prefix)
+        if namespace is not None:
+            value = attrs.get('{%s}%s' % (namespace, name, ))
+    return value
+
+
+class GDSParseError(Exception):
+    pass
+
+
+def raise_parse_error(node, msg):
+    msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, )
+    raise GDSParseError(msg)
+
+
+class MixedContainer:
+    # Constants for category:
+    CategoryNone = 0
+    CategoryText = 1
+    CategorySimple = 2
+    CategoryComplex = 3
+    # Constants for content_type:
+    TypeNone = 0
+    TypeText = 1
+    TypeString = 2
+    TypeInteger = 3
+    TypeFloat = 4
+    TypeDecimal = 5
+    TypeDouble = 6
+    TypeBoolean = 7
+    TypeBase64 = 8
+    def __init__(self, category, content_type, name, value):
+        self.category = category
+        self.content_type = content_type
+        self.name = name
+        self.value = value
+    def getCategory(self):
+        return self.category
+    def getContenttype(self, content_type):
+        return self.content_type
+    def getValue(self):
+        return self.value
+    def getName(self):
+        return self.name
+    def export(self, outfile, level, name, namespace,
+               pretty_print=True):
+        if self.category == MixedContainer.CategoryText:
+            # Prevent exporting empty content as empty lines.
+            if self.value.strip():
+                outfile.write(self.value)
+        elif self.category == MixedContainer.CategorySimple:
+            self.exportSimple(outfile, level, name)
+        else:  # category == MixedContainer.CategoryComplex
+            self.value.export(
+                outfile, level, namespace, name_=name,
+                pretty_print=pretty_print)
+    def exportSimple(self, outfile, level, name):
+        if self.content_type == MixedContainer.TypeString:
+            outfile.write('<%s>%s</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeInteger or \
+                self.content_type == MixedContainer.TypeBoolean:
+            outfile.write('<%s>%d</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeFloat or \
+                self.content_type == MixedContainer.TypeDecimal:
+            outfile.write('<%s>%f</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeDouble:
+            outfile.write('<%s>%g</%s>' % (
+                self.name, self.value, self.name))
+        elif self.content_type == MixedContainer.TypeBase64:
+            outfile.write('<%s>%s</%s>' % (
+                self.name,
+                base64.b64encode(self.value),
+                self.name))
+    def to_etree(self, element):
+        if self.category == MixedContainer.CategoryText:
+            # Prevent exporting empty content as empty lines.
+            if self.value.strip():
+                if len(element) > 0:
+                    if element[-1].tail is None:
+                        element[-1].tail = self.value
+                    else:
+                        element[-1].tail += self.value
+                else:
+                    if element.text is None:
+                        element.text = self.value
+                    else:
+                        element.text += self.value
+        elif self.category == MixedContainer.CategorySimple:
+            subelement = etree_.SubElement(
+                element, '%s' % self.name)
+            subelement.text = self.to_etree_simple()
+        else:  # category == MixedContainer.CategoryComplex
+            self.value.to_etree(element)
+    def to_etree_simple(self):
+        if self.content_type == MixedContainer.TypeString:
+            text = self.value
+        elif (self.content_type == MixedContainer.TypeInteger or
+                self.content_type == MixedContainer.TypeBoolean):
+            text = '%d' % self.value
+        elif (self.content_type == MixedContainer.TypeFloat or
+                self.content_type == MixedContainer.TypeDecimal):
+            text = '%f' % self.value
+        elif self.content_type == MixedContainer.TypeDouble:
+            text = '%g' % self.value
+        elif self.content_type == MixedContainer.TypeBase64:
+            text = '%s' % base64.b64encode(self.value)
+        return text
+    def exportLiteral(self, outfile, level, name):
+        if self.category == MixedContainer.CategoryText:
+            showIndent(outfile, level)
+            outfile.write(
+                'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % (
+                    self.category, self.content_type,
+                    self.name, self.value))
+        elif self.category == MixedContainer.CategorySimple:
+            showIndent(outfile, level)
+            outfile.write(
+                'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % (
+                    self.category, self.content_type,
+                    self.name, self.value))
+        else:  # category == MixedContainer.CategoryComplex
+            showIndent(outfile, level)
+            outfile.write(
+                'model_.MixedContainer(%d, %d, "%s",\n' % (
+                    self.category, self.content_type, self.name,))
+            self.value.exportLiteral(outfile, level + 1)
+            showIndent(outfile, level)
+            outfile.write(')\n')
+
+
+class MemberSpec_(object):
+    def __init__(self, name='', data_type='', container=0,
+                 optional=0, child_attrs=None, choice=None):
+        self.name = name
+        self.data_type = data_type
+        self.container = container
+        self.child_attrs = child_attrs
+        self.choice = choice
+        self.optional = optional
+    def set_name(self, name): self.name = name
+    def get_name(self): return self.name
+    def set_data_type(self, data_type):
self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + +# +# Data representation classes. +# + + +class tpOpcaoSimples(object): + _0='0' + _1='1' + _2='2' + _3='3' + + +class tpStatusNFe(object): + N='N' + C='C' + E='E' + + +class tpTipoRPS(object): + RPS='RPS' + RPSM='RPS-M' + RPSC='RPS-C' + + +class PedidoConsultaNFTS(GeneratedsSuper): + """Schema utilizado para PEDIDO de consultas de NFTS.Este Schema XML é + utilizado para os tomadores/intermediários de serviços consultar + NFTS.""" + subclass = None + superclass = None + def __init__(self, Cabecalho=None, DetalheNFTS=None, Signature=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.Cabecalho = Cabecalho + if DetalheNFTS is None: + self.DetalheNFTS = [] + else: + self.DetalheNFTS = DetalheNFTS + self.Signature = Signature + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, PedidoConsultaNFTS) + if subclass is not None: + return subclass(*args_, **kwargs_) + if PedidoConsultaNFTS.subclass: + return PedidoConsultaNFTS.subclass(*args_, **kwargs_) + else: + return PedidoConsultaNFTS(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): + if ( + self.Cabecalho is not None or + self.DetalheNFTS or + self.Signature is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PedidoConsultaNFTS', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('PedidoConsultaNFTS') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PedidoConsultaNFTS') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='PedidoConsultaNFTS', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PedidoConsultaNFTS'): + pass + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PedidoConsultaNFTS', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Cabecalho is not None: + self.Cabecalho.export(outfile, 
level, namespaceprefix_, namespacedef_='', name_='Cabecalho', pretty_print=pretty_print) + for DetalheNFTS_ in self.DetalheNFTS: + DetalheNFTS_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='DetalheNFTS', pretty_print=pretty_print) + if self.Signature is not None: + self.Signature.export(outfile, level, namespaceprefix_='ds:', namespacedef_='', name_='Signature', pretty_print=pretty_print) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + pass + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'Cabecalho': + obj_ = CabecalhoType.factory(parent_object_=self) + obj_.build(child_) + self.Cabecalho = obj_ + obj_.original_tagname_ = 'Cabecalho' + elif nodeName_ == 'DetalheNFTS': + obj_ = DetalheNFTSType.factory(parent_object_=self) + obj_.build(child_) + self.DetalheNFTS.append(obj_) + obj_.original_tagname_ = 'DetalheNFTS' + elif nodeName_ == 'Signature': + obj_ = SignatureType.factory(parent_object_=self) + obj_.build(child_) + self.Signature = obj_ + obj_.original_tagname_ = 'Signature' +# end class PedidoConsultaNFTS + + +class CabecalhoType(GeneratedsSuper): + """Cabeçalho do pedido de consulta de NFTS.Informe a Versão do Schema + XML utilizado.""" + subclass = None + superclass = None + def __init__(self, Versao='1', id=None, Remetente=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.Versao = _cast(None, Versao) + self.id = _cast(None, id) + self.Remetente = Remetente + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, CabecalhoType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if CabecalhoType.subclass: + return CabecalhoType.subclass(*args_, **kwargs_) + else: + return CabecalhoType(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): + if ( + self.Remetente is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CabecalhoType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('CabecalhoType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CabecalhoType') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='CabecalhoType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='CabecalhoType'): + if self.Versao != 1 and 'Versao' not in already_processed: + already_processed.add('Versao') + outfile.write(' Versao=%s' % (quote_attrib(self.Versao), )) + if self.id is not None and 'id' not in 
already_processed: + already_processed.add('id') + outfile.write(' id=%s' % (quote_attrib(self.id), )) + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CabecalhoType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Remetente is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sRemetente>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Remetente), input_name='Remetente')), namespaceprefix_ , eol_)) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('Versao', node) + if value is not None and 'Versao' not in already_processed: + already_processed.add('Versao') + self.Versao = value + value = find_attr_value_('id', node) + if value is not None and 'id' not in already_processed: + already_processed.add('id') + self.id = value + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'Remetente': + Remetente_ = child_.text + Remetente_ = self.gds_validate_string(Remetente_, node, 'Remetente') + self.Remetente = Remetente_ +# end class CabecalhoType + + +class DetalheNFTSType(GeneratedsSuper): + """Enviar consulta de uma ou várias NFTS.""" + subclass = None + superclass = None + def __init__(self, ChaveNFTS=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ChaveNFTS = ChaveNFTS + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, DetalheNFTSType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if DetalheNFTSType.subclass: + return DetalheNFTSType.subclass(*args_, **kwargs_) + else: + return DetalheNFTSType(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): + if ( + self.ChaveNFTS is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='DetalheNFTSType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('DetalheNFTSType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='DetalheNFTSType') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='DetalheNFTSType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='DetalheNFTSType'): + pass + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='DetalheNFTSType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.ChaveNFTS is not 
None:
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sChaveNFTS>%s</%sChaveNFTS>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ChaveNFTS), input_name='ChaveNFTS')), namespaceprefix_ , eol_))
+    def build(self, node):
+        already_processed = set()
+        self.buildAttributes(node, node.attrib, already_processed)
+        for child in node:
+            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
+            self.buildChildren(child, node, nodeName_)
+        return self
+    def buildAttributes(self, node, attrs, already_processed):
+        pass
+    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
+        if nodeName_ == 'ChaveNFTS':
+            ChaveNFTS_ = child_.text
+            ChaveNFTS_ = self.gds_validate_string(ChaveNFTS_, node, 'ChaveNFTS')
+            self.ChaveNFTS = ChaveNFTS_
+# end class DetalheNFTSType
+
+
+GDSClassesMapping = {
+}
+
+
+USAGE_TEXT = """
+Usage: python <Parser>.py [ -s ] <in_xml_file>
+"""
+
+
+def usage():
+    print(USAGE_TEXT)
+    sys.exit(1)
+
+
+def get_root_tag(node):
+    tag = Tag_pattern_.match(node.tag).groups()[-1]
+    rootClass = GDSClassesMapping.get(tag)
+    if rootClass is None:
+        rootClass = globals().get(tag)
+    return tag, rootClass
+
+
+def parse(inFileName, silence=False):
+    parser = None
+    doc = parsexml_(inFileName, parser)
+    rootNode = doc.getroot()
+    rootTag, rootClass = get_root_tag(rootNode)
+    if rootClass is None:
+        rootTag = 'PedidoConsultaNFTS'
+        rootClass = PedidoConsultaNFTS
+    rootObj = rootClass.factory()
+    rootObj.build(rootNode)
+    # Enable Python to collect the space used by the DOM.
+    doc = None
+    if not silence:
+        sys.stdout.write('<?xml version="1.0" ?>\n')
+        rootObj.export(
+            sys.stdout, 0, name_=rootTag,
+            namespacedef_='',
+            pretty_print=True)
+    return rootObj
+
+
+def parseEtree(inFileName, silence=False):
+    parser = None
+    doc = parsexml_(inFileName, parser)
+    rootNode = doc.getroot()
+    rootTag, rootClass = get_root_tag(rootNode)
+    if rootClass is None:
+        rootTag = 'PedidoConsultaNFTS'
+        rootClass = PedidoConsultaNFTS
+    rootObj = rootClass.factory()
+    rootObj.build(rootNode)
+    # Enable Python to collect the space used by the DOM.
+    doc = None
+    mapping = {}
+    rootElement = rootObj.to_etree(None, name_=rootTag, mapping_=mapping)
+    reverse_mapping = rootObj.gds_reverse_node_mapping(mapping)
+    if not silence:
+        content = etree_.tostring(
+            rootElement, pretty_print=True,
+            xml_declaration=True, encoding="utf-8")
+        sys.stdout.write(content)
+        sys.stdout.write('\n')
+    return rootObj, rootElement, mapping, reverse_mapping
+
+
+def parseString(inString, silence=False):
+    '''Parse a string, create the object tree, and export it.
+
+    Arguments:
+    - inString -- A string. This XML fragment should not start
+      with an XML declaration containing an encoding.
+    - silence -- A boolean. If False, export the object.
+    Returns -- The root object in the tree.
+    '''
+    parser = None
+    rootNode= parsexmlstring_(inString, parser)
+    rootTag, rootClass = get_root_tag(rootNode)
+    if rootClass is None:
+        rootTag = 'PedidoConsultaNFTS'
+        rootClass = PedidoConsultaNFTS
+    rootObj = rootClass.factory()
+    rootObj.build(rootNode)
+    # Enable Python to collect the space used by the DOM.
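+    # A minimal round-trip sketch (commented out); the XML literal below is
+    # an illustrative placeholder, not an official sample payload:
+    #
+    #     xml = ("<PedidoConsultaNFTS>"
+    #            "<Cabecalho Versao='1'><Remetente>x</Remetente></Cabecalho>"
+    #            "</PedidoConsultaNFTS>")
+    #     obj = parseString(xml, silence=True)
+    #     assert obj.Cabecalho.Remetente == 'x'
+    #     obj.export(sys.stdout, 0, name_='PedidoConsultaNFTS')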
+ if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + return rootObj + + +def parseLiteral(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PedidoConsultaNFTS' + rootClass = PedidoConsultaNFTS + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + doc = None + if not silence: + sys.stdout.write('#from PedidoConsultaNFTS_v01 import *\n\n') + sys.stdout.write('import PedidoConsultaNFTS_v01 as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + + +__all__ = [ + "CabecalhoType", + "DetalheNFTSType", + "PedidoConsultaNFTS" +] diff --git a/nfselib/paulistana/v02/PedidoConsultaNFePeriodo_v01.py b/nfselib/paulistana/v02/PedidoConsultaNFePeriodo_v01.py new file mode 100644 index 0000000..976f5ef --- /dev/null +++ b/nfselib/paulistana/v02/PedidoConsultaNFePeriodo_v01.py @@ -0,0 +1,1102 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Jul 22 06:40:29 2020 by generateDS.py version 2.30.15. +# Python 3.8.3 (default, May 17 2020, 18:15:42) [GCC 10.1.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('--no-collect-includes', '') +# ('--use-getter-setter', 'none') +# ('-f', '') +# ('-o', './paulistanalib/v02/PedidoConsultaNFePeriodo_v01.py') +# +# Command line arguments: +# ./process_includes/PedidoConsultaNFePeriodo_v01.xsd +# +# Command line: +# /home/mileo/Projects/oca12/bin/erpbrasil-edoc-gen-generate-python --no-namespace-defs --no-collect-includes --use-getter-setter="none" -f -o "./paulistanalib/v02/PedidoConsultaNFePeriodo_v01.py" ./process_includes/PedidoConsultaNFePeriodo_v01.xsd +# +# Current working directory (os.getcwd()): +# schemas +# + +import os +import sys +import re as re_ +import base64 +import datetime as datetime_ +import warnings as warnings_ +try: + from lxml import etree as etree_ +except ImportError: + from xml.etree import ElementTree as etree_ + + +Validate_simpletypes_ = True +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. 
This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. +# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ImportError: + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ImportError: + GenerateDSNamespaceTypePrefixes_ = {} + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ImportError as exp: + + class GeneratedsSuper(object): + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data) + def gds_validate_base64(self, input_data, node=None, input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % input_data + def gds_validate_integer(self, input_data, node=None, input_name=''): + return input_data + def gds_format_integer_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integers') + return values + def gds_format_float(self, input_data, input_name=''): + return ('%.15f' % input_data).rstrip('0') + def gds_validate_float(self, input_data, node=None, input_name=''): + return input_data + def gds_format_float_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except 
(TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of floats') + return values + def gds_format_double(self, input_data, input_name=''): + return '%e' % input_data + def gds_validate_double(self, input_data, node=None, input_name=''): + return input_data + def gds_format_double_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of doubles') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_validate_boolean(self, input_data, node=None, input_name=''): + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + if value not in ('true', '1', 'false', '0', ): + raise_parse_error( + node, + 'Requires sequence of booleans ' + '("true", "1", "false", "0")') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' 
+ time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. 
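+            # Illustrative example: with patterns = [[r'\d{11}', r'\d{14}']]
+            # an 11-digit CPF or a 14-digit CNPJ string is accepted, because
+            # the target only has to fully match one alternative from each
+            # inner list.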
+ found1 = True + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + return None + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + if type(self) != type(other): + return False + return self.__dict__ == other.__dict__ + def __ne__(self, other): + return not self.__eq__(other) + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. 
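+# Note: getSubclassFromModule_ above looks for a class named
+# '<OriginalClass>Sub' in CurrentSubclassModule_ (defined below), so the
+# generated factory() methods can be redirected to user subclasses without
+# editing this file. A sketch, with a hypothetical subclass module name:
+#
+#     import my_paulistana_subclasses                  # hypothetical module
+#     import PedidoConsultaNFePeriodo_v01 as supermod
+#     supermod.CurrentSubclassModule_ = my_paulistana_subclasses
+#
+# where my_paulistana_subclasses defines e.g. CabecalhoTypeSub(CabecalhoType).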
+# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. +# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(' ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." + if not inStr: + return '' + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s2 = '' + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos:mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start():mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + return s1 + + +def quote_attrib(inStr): + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s1 = s1.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', """) + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find('\n') == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find('\n') == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = '' + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(':') + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get('{%s}%s' % (namespace, name, )) + return value + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + def getCategory(self): + return self.category + def getContenttype(self, content_type): + return self.content_type + def getValue(self): + return self.value + def getName(self): + return self.name + def export(self, outfile, level, name, namespace, + pretty_print=True): + if self.category == 
MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def set_data_type(self, data_type): 
self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + +# +# Data representation classes. +# + + +class PedidoConsultaNFePeriodo(GeneratedsSuper): + """Schema utilizado para PEDIDO de consulta de NFS-e Emitidas ou + Recebidas por período.Este Schema XML é utilizado pelos + Prestadores/Tomadores de serviços consultarem NFS-e Emitidas ou + Recebidas por eles.""" + subclass = None + superclass = None + def __init__(self, Cabecalho=None, Signature=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.Cabecalho = Cabecalho + self.Signature = Signature + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, PedidoConsultaNFePeriodo) + if subclass is not None: + return subclass(*args_, **kwargs_) + if PedidoConsultaNFePeriodo.subclass: + return PedidoConsultaNFePeriodo.subclass(*args_, **kwargs_) + else: + return PedidoConsultaNFePeriodo(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): + if ( + self.Cabecalho is not None or + self.Signature is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PedidoConsultaNFePeriodo', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('PedidoConsultaNFePeriodo') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PedidoConsultaNFePeriodo') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='PedidoConsultaNFePeriodo', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PedidoConsultaNFePeriodo'): + pass + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PedidoConsultaNFePeriodo', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Cabecalho is not None: + self.Cabecalho.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Cabecalho', pretty_print=pretty_print) + if self.Signature is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sSignature>%s%s' 
% (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Signature), input_name='Signature')), namespaceprefix_ , eol_)) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + pass + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'Cabecalho': + obj_ = CabecalhoType.factory(parent_object_=self) + obj_.build(child_) + self.Cabecalho = obj_ + obj_.original_tagname_ = 'Cabecalho' + elif nodeName_ == 'Signature': + Signature_ = child_.text + Signature_ = self.gds_validate_string(Signature_, node, 'Signature') + self.Signature = Signature_ +# end class PedidoConsultaNFePeriodo + + +class CabecalhoType(GeneratedsSuper): + """Cabeçalho do pedido.Informe a Versão do Schema XML utilizado.""" + subclass = None + superclass = None + def __init__(self, Versao='1', CPFCNPJRemetente=None, CPFCNPJ=None, Inscricao=None, dtInicio=None, dtFim=None, NumeroPagina='1', **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.Versao = _cast(None, Versao) + self.CPFCNPJRemetente = CPFCNPJRemetente + self.CPFCNPJ = CPFCNPJ + self.Inscricao = Inscricao + if isinstance(dtInicio, BaseStrType_): + initvalue_ = datetime_.datetime.strptime(dtInicio, '%Y-%m-%d').date() + else: + initvalue_ = dtInicio + self.dtInicio = initvalue_ + if isinstance(dtFim, BaseStrType_): + initvalue_ = datetime_.datetime.strptime(dtFim, '%Y-%m-%d').date() + else: + initvalue_ = dtFim + self.dtFim = initvalue_ + self.NumeroPagina = NumeroPagina + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, CabecalhoType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if CabecalhoType.subclass: + return CabecalhoType.subclass(*args_, **kwargs_) + else: + return CabecalhoType(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): + if ( + self.CPFCNPJRemetente is not None or + self.CPFCNPJ is not None or + self.Inscricao is not None or + self.dtInicio is not None or + self.dtFim is not None or + self.NumeroPagina != "1" + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CabecalhoType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('CabecalhoType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CabecalhoType') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='CabecalhoType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='CabecalhoType'): + if self.Versao != "1" and 'Versao' not 
in already_processed: + already_processed.add('Versao') + outfile.write(' Versao=%s' % (quote_attrib(self.Versao), )) + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CabecalhoType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.CPFCNPJRemetente is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sCPFCNPJRemetente>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CPFCNPJRemetente), input_name='CPFCNPJRemetente')), namespaceprefix_ , eol_)) + if self.CPFCNPJ is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sCPFCNPJ>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CPFCNPJ), input_name='CPFCNPJ')), namespaceprefix_ , eol_)) + if self.Inscricao is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sInscricao>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Inscricao), input_name='Inscricao')), namespaceprefix_ , eol_)) + if self.dtInicio is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sdtInicio>%s%s' % (namespaceprefix_ , self.gds_format_date(self.dtInicio, input_name='dtInicio'), namespaceprefix_ , eol_)) + if self.dtFim is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sdtFim>%s%s' % (namespaceprefix_ , self.gds_format_date(self.dtFim, input_name='dtFim'), namespaceprefix_ , eol_)) + if self.NumeroPagina is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sNumeroPagina>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.NumeroPagina), input_name='NumeroPagina')), namespaceprefix_ , eol_)) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('Versao', node) + if value is not None and 'Versao' not in already_processed: + already_processed.add('Versao') + self.Versao = value + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'CPFCNPJRemetente': + CPFCNPJRemetente_ = child_.text + CPFCNPJRemetente_ = self.gds_validate_string(CPFCNPJRemetente_, node, 'CPFCNPJRemetente') + self.CPFCNPJRemetente = CPFCNPJRemetente_ + elif nodeName_ == 'CPFCNPJ': + CPFCNPJ_ = child_.text + CPFCNPJ_ = self.gds_validate_string(CPFCNPJ_, node, 'CPFCNPJ') + self.CPFCNPJ = CPFCNPJ_ + elif nodeName_ == 'Inscricao': + Inscricao_ = child_.text + Inscricao_ = self.gds_validate_string(Inscricao_, node, 'Inscricao') + self.Inscricao = Inscricao_ + elif nodeName_ == 'dtInicio': + sval_ = child_.text + dval_ = self.gds_parse_date(sval_) + self.dtInicio = dval_ + elif nodeName_ == 'dtFim': + sval_ = child_.text + dval_ = self.gds_parse_date(sval_) + self.dtFim = dval_ + elif nodeName_ == 'NumeroPagina': + NumeroPagina_ = child_.text + NumeroPagina_ = self.gds_validate_string(NumeroPagina_, node, 'NumeroPagina') + self.NumeroPagina = NumeroPagina_ +# end class CabecalhoType + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + rootClass = GDSClassesMapping.get(tag) + if rootClass is None: 
+ rootClass = globals().get(tag) + return tag, rootClass + + +def parse(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PedidoConsultaNFePeriodo' + rootClass = PedidoConsultaNFePeriodo + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + doc = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='', + pretty_print=True) + return rootObj + + +def parseEtree(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PedidoConsultaNFePeriodo' + rootClass = PedidoConsultaNFePeriodo + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + doc = None + mapping = {} + rootElement = rootObj.to_etree(None, name_=rootTag, mapping_=mapping) + reverse_mapping = rootObj.gds_reverse_node_mapping(mapping) + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(content) + sys.stdout.write('\n') + return rootObj, rootElement, mapping, reverse_mapping + + +def parseString(inString, silence=False): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. + ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PedidoConsultaNFePeriodo' + rootClass = PedidoConsultaNFePeriodo + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + return rootObj + + +def parseLiteral(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PedidoConsultaNFePeriodo' + rootClass = PedidoConsultaNFePeriodo + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + doc = None + if not silence: + sys.stdout.write('#from PedidoConsultaNFePeriodo_v01 import *\n\n') + sys.stdout.write('import PedidoConsultaNFePeriodo_v01 as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + + +__all__ = [ + "CabecalhoType", + "PedidoConsultaNFePeriodo" +] diff --git a/nfselib/paulistana/v02/PedidoConsultaNFe_v01.py b/nfselib/paulistana/v02/PedidoConsultaNFe_v01.py new file mode 100644 index 0000000..583b007 --- /dev/null +++ b/nfselib/paulistana/v02/PedidoConsultaNFe_v01.py @@ -0,0 +1,1145 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Jul 22 06:40:29 2020 by generateDS.py version 2.30.15. 
+# Python 3.8.3 (default, May 17 2020, 18:15:42) [GCC 10.1.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('--no-collect-includes', '') +# ('--use-getter-setter', 'none') +# ('-f', '') +# ('-o', './paulistanalib/v02/PedidoConsultaNFe_v01.py') +# +# Command line arguments: +# ./process_includes/PedidoConsultaNFe_v01.xsd +# +# Command line: +# /home/mileo/Projects/oca12/bin/erpbrasil-edoc-gen-generate-python --no-namespace-defs --no-collect-includes --use-getter-setter="none" -f -o "./paulistanalib/v02/PedidoConsultaNFe_v01.py" ./process_includes/PedidoConsultaNFe_v01.xsd +# +# Current working directory (os.getcwd()): +# schemas +# + +import os +import sys +import re as re_ +import base64 +import datetime as datetime_ +import warnings as warnings_ +try: + from lxml import etree as etree_ +except ImportError: + from xml.etree import ElementTree as etree_ + + +Validate_simpletypes_ = True +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ImportError: + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ImportError: + GenerateDSNamespaceTypePrefixes_ = {} + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ImportError as exp: + + class GeneratedsSuper(object): + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data) + def gds_validate_base64(self, input_data, node=None, input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % input_data + def gds_validate_integer(self, input_data, node=None, input_name=''): + return input_data + def gds_format_integer_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integers') + return values + def gds_format_float(self, input_data, input_name=''): + return ('%.15f' % input_data).rstrip('0') + def gds_validate_float(self, input_data, node=None, input_name=''): + return input_data + def gds_format_float_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of floats') + return values + def gds_format_double(self, input_data, input_name=''): + return '%e' % input_data + def gds_validate_double(self, input_data, node=None, input_name=''): + return input_data + def gds_format_double_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of doubles') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_validate_boolean(self, input_data, node=None, input_name=''): + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, 
input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + if value not in ('true', '1', 'false', '0', ): + raise_parse_error( + node, + 'Requires sequence of booleans ' + '("true", "1", "false", "0")') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' 
+ time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. 
+ found1 = True + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + return None + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + if type(self) != type(other): + return False + return self.__dict__ == other.__dict__ + def __ne__(self, other): + return not self.__eq__(other) + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. 
+# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. +# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(' ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." + if not inStr: + return '' + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s2 = '' + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos:mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start():mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + return s1 + + +def quote_attrib(inStr): + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s1 = s1.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', """) + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find('\n') == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find('\n') == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = '' + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(':') + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get('{%s}%s' % (namespace, name, )) + return value + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + def getCategory(self): + return self.category + def getContenttype(self, content_type): + return self.content_type + def getValue(self): + return self.value + def getName(self): + return self.name + def export(self, outfile, level, name, namespace, + pretty_print=True): + if self.category == 
MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def set_data_type(self, data_type): 
self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + +# +# Data representation classes. +# + + +class PedidoConsultaNFe(GeneratedsSuper): + """Schema utilizado para PEDIDO de consultas de NFS-e.Este Schema XML é + utilizado pelos prestadores de serviços consultarem NFS-e + geradas por eles.""" + subclass = None + superclass = None + def __init__(self, Cabecalho=None, Detalhe=None, Signature=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.Cabecalho = Cabecalho + if Detalhe is None: + self.Detalhe = [] + else: + self.Detalhe = Detalhe + self.Signature = Signature + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, PedidoConsultaNFe) + if subclass is not None: + return subclass(*args_, **kwargs_) + if PedidoConsultaNFe.subclass: + return PedidoConsultaNFe.subclass(*args_, **kwargs_) + else: + return PedidoConsultaNFe(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): + if ( + self.Cabecalho is not None or + self.Detalhe or + self.Signature is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PedidoConsultaNFe', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('PedidoConsultaNFe') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PedidoConsultaNFe') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='PedidoConsultaNFe', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PedidoConsultaNFe'): + pass + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PedidoConsultaNFe', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Cabecalho is not None: + self.Cabecalho.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Cabecalho', pretty_print=pretty_print) + for Detalhe_ in self.Detalhe: + Detalhe_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Detalhe', 
pretty_print=pretty_print) + if self.Signature is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sSignature>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Signature), input_name='Signature')), namespaceprefix_ , eol_)) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + pass + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'Cabecalho': + obj_ = CabecalhoType.factory(parent_object_=self) + obj_.build(child_) + self.Cabecalho = obj_ + obj_.original_tagname_ = 'Cabecalho' + elif nodeName_ == 'Detalhe': + obj_ = DetalheType.factory(parent_object_=self) + obj_.build(child_) + self.Detalhe.append(obj_) + obj_.original_tagname_ = 'Detalhe' + elif nodeName_ == 'Signature': + Signature_ = child_.text + Signature_ = self.gds_validate_string(Signature_, node, 'Signature') + self.Signature = Signature_ +# end class PedidoConsultaNFe + + +class CabecalhoType(GeneratedsSuper): + """Cabeçalho do pedido.Informe a Versão do Schema XML utilizado.""" + subclass = None + superclass = None + def __init__(self, Versao='1', CPFCNPJRemetente=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.Versao = _cast(None, Versao) + self.CPFCNPJRemetente = CPFCNPJRemetente + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, CabecalhoType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if CabecalhoType.subclass: + return CabecalhoType.subclass(*args_, **kwargs_) + else: + return CabecalhoType(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): + if ( + self.CPFCNPJRemetente is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CabecalhoType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('CabecalhoType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CabecalhoType') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='CabecalhoType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='CabecalhoType'): + if self.Versao != "1" and 'Versao' not in already_processed: + already_processed.add('Versao') + outfile.write(' Versao=%s' % (quote_attrib(self.Versao), )) + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CabecalhoType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.CPFCNPJRemetente is not 
None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sCPFCNPJRemetente>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CPFCNPJRemetente), input_name='CPFCNPJRemetente')), namespaceprefix_ , eol_)) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('Versao', node) + if value is not None and 'Versao' not in already_processed: + already_processed.add('Versao') + self.Versao = value + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'CPFCNPJRemetente': + CPFCNPJRemetente_ = child_.text + CPFCNPJRemetente_ = self.gds_validate_string(CPFCNPJRemetente_, node, 'CPFCNPJRemetente') + self.CPFCNPJRemetente = CPFCNPJRemetente_ +# end class CabecalhoType + + +class DetalheType(GeneratedsSuper): + """Detalhe do pedido. Cada item de detalhe deverá conter a chave de uma + NFS-e ou a chave de um RPS.""" + subclass = None + superclass = None + def __init__(self, ChaveRPS=None, ChaveNFe=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ChaveRPS = ChaveRPS + self.ChaveNFe = ChaveNFe + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, DetalheType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if DetalheType.subclass: + return DetalheType.subclass(*args_, **kwargs_) + else: + return DetalheType(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): + if ( + self.ChaveRPS is not None or + self.ChaveNFe is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='DetalheType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('DetalheType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='DetalheType') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='DetalheType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='DetalheType'): + pass + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='DetalheType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.ChaveRPS is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sChaveRPS>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ChaveRPS), input_name='ChaveRPS')), namespaceprefix_ , eol_)) + if self.ChaveNFe is not None: + showIndent(outfile, level, pretty_print) + 
outfile.write('<%sChaveNFe>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ChaveNFe), input_name='ChaveNFe')), namespaceprefix_ , eol_)) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + pass + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'ChaveRPS': + ChaveRPS_ = child_.text + ChaveRPS_ = self.gds_validate_string(ChaveRPS_, node, 'ChaveRPS') + self.ChaveRPS = ChaveRPS_ + elif nodeName_ == 'ChaveNFe': + ChaveNFe_ = child_.text + ChaveNFe_ = self.gds_validate_string(ChaveNFe_, node, 'ChaveNFe') + self.ChaveNFe = ChaveNFe_ +# end class DetalheType + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + rootClass = GDSClassesMapping.get(tag) + if rootClass is None: + rootClass = globals().get(tag) + return tag, rootClass + + +def parse(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PedidoConsultaNFe' + rootClass = PedidoConsultaNFe + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + doc = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='', + pretty_print=True) + return rootObj + + +def parseEtree(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PedidoConsultaNFe' + rootClass = PedidoConsultaNFe + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + doc = None + mapping = {} + rootElement = rootObj.to_etree(None, name_=rootTag, mapping_=mapping) + reverse_mapping = rootObj.gds_reverse_node_mapping(mapping) + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(content) + sys.stdout.write('\n') + return rootObj, rootElement, mapping, reverse_mapping + + +def parseString(inString, silence=False): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. + ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PedidoConsultaNFe' + rootClass = PedidoConsultaNFe + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. 
+ if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + return rootObj + + +def parseLiteral(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PedidoConsultaNFe' + rootClass = PedidoConsultaNFe + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + doc = None + if not silence: + sys.stdout.write('#from PedidoConsultaNFe_v01 import *\n\n') + sys.stdout.write('import PedidoConsultaNFe_v01 as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + + +__all__ = [ + "CabecalhoType", + "DetalheType", + "PedidoConsultaNFe" +] diff --git a/nfselib/paulistana/v02/PedidoEnvioLoteNFTS_v01.py b/nfselib/paulistana/v02/PedidoEnvioLoteNFTS_v01.py new file mode 100644 index 0000000..5047a8c --- /dev/null +++ b/nfselib/paulistana/v02/PedidoEnvioLoteNFTS_v01.py @@ -0,0 +1,1182 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Jul 22 06:40:30 2020 by generateDS.py version 2.30.15. +# Python 3.8.3 (default, May 17 2020, 18:15:42) [GCC 10.1.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('--no-collect-includes', '') +# ('--use-getter-setter', 'none') +# ('-f', '') +# ('-o', './paulistanalib/v02/PedidoEnvioLoteNFTS_v01.py') +# +# Command line arguments: +# ./process_includes/PedidoEnvioLoteNFTS_v01.xsd +# +# Command line: +# /home/mileo/Projects/oca12/bin/erpbrasil-edoc-gen-generate-python --no-namespace-defs --no-collect-includes --use-getter-setter="none" -f -o "./paulistanalib/v02/PedidoEnvioLoteNFTS_v01.py" ./process_includes/PedidoEnvioLoteNFTS_v01.xsd +# +# Current working directory (os.getcwd()): +# schemas +# + +import os +import sys +import re as re_ +import base64 +import datetime as datetime_ +import warnings as warnings_ +try: + from lxml import etree as etree_ +except ImportError: + from xml.etree import ElementTree as etree_ + + +Validate_simpletypes_ = True +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. 
This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. +# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ImportError: + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ImportError: + GenerateDSNamespaceTypePrefixes_ = {} + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ImportError as exp: + + class GeneratedsSuper(object): + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data) + def gds_validate_base64(self, input_data, node=None, input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % input_data + def gds_validate_integer(self, input_data, node=None, input_name=''): + return input_data + def gds_format_integer_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integers') + return values + def gds_format_float(self, input_data, input_name=''): + return ('%.15f' % input_data).rstrip('0') + def gds_validate_float(self, input_data, node=None, input_name=''): + return input_data + def gds_format_float_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except 
(TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of floats') + return values + def gds_format_double(self, input_data, input_name=''): + return '%e' % input_data + def gds_validate_double(self, input_data, node=None, input_name=''): + return input_data + def gds_format_double_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of doubles') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_validate_boolean(self, input_data, node=None, input_name=''): + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + if value not in ('true', '1', 'false', '0', ): + raise_parse_error( + node, + 'Requires sequence of booleans ' + '("true", "1", "false", "0")') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' 
+ time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. 
+ found1 = True + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + return None + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + if type(self) != type(other): + return False + return self.__dict__ == other.__dict__ + def __ne__(self, other): + return not self.__eq__(other) + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. 
+# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. +# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(' ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." + if not inStr: + return '' + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s2 = '' + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos:mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start():mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + return s1 + + +def quote_attrib(inStr): + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s1 = s1.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', """) + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find('\n') == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find('\n') == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = '' + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(':') + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get('{%s}%s' % (namespace, name, )) + return value + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + def getCategory(self): + return self.category + def getContenttype(self, content_type): + return self.content_type + def getValue(self): + return self.value + def getName(self): + return self.name + def export(self, outfile, level, name, namespace, + pretty_print=True): + if self.category == 
MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def set_data_type(self, data_type): 
self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + +# +# Data representation classes. +# + + +class tpOpcaoSimples(object): + _0='0' + _1='1' + _2='2' + _3='3' + + +class tpStatusNFe(object): + N='N' + C='C' + E='E' + + +class tpTipoRPS(object): + RPS='RPS' + RPSM='RPS-M' + RPSC='RPS-C' + + +class PedidoEnvioLoteNFTS(GeneratedsSuper): + """Schema utilizado para PEDIDO de envio de lote de NFTS.Este Schema + XML é utilizado pelos tomadores/intermediários de serviços para + emissão de NFTS.""" + subclass = None + superclass = None + def __init__(self, Cabecalho=None, NFTS=None, Signature=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.Cabecalho = Cabecalho + if NFTS is None: + self.NFTS = [] + else: + self.NFTS = NFTS + self.Signature = Signature + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, PedidoEnvioLoteNFTS) + if subclass is not None: + return subclass(*args_, **kwargs_) + if PedidoEnvioLoteNFTS.subclass: + return PedidoEnvioLoteNFTS.subclass(*args_, **kwargs_) + else: + return PedidoEnvioLoteNFTS(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): + if ( + self.Cabecalho is not None or + self.NFTS or + self.Signature is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PedidoEnvioLoteNFTS', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('PedidoEnvioLoteNFTS') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PedidoEnvioLoteNFTS') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='PedidoEnvioLoteNFTS', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PedidoEnvioLoteNFTS'): + pass + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PedidoEnvioLoteNFTS', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Cabecalho is not None: + self.Cabecalho.export(outfile, level, namespaceprefix_, 
namespacedef_='', name_='Cabecalho', pretty_print=pretty_print) + for NFTS_ in self.NFTS: + showIndent(outfile, level, pretty_print) + outfile.write('<%sNFTS>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(NFTS_), input_name='NFTS')), namespaceprefix_ , eol_)) + if self.Signature is not None: + self.Signature.export(outfile, level, namespaceprefix_='ds:', namespacedef_='', name_='Signature', pretty_print=pretty_print) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + pass + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'Cabecalho': + obj_ = CabecalhoType.factory(parent_object_=self) + obj_.build(child_) + self.Cabecalho = obj_ + obj_.original_tagname_ = 'Cabecalho' + elif nodeName_ == 'NFTS': + NFTS_ = child_.text + NFTS_ = self.gds_validate_string(NFTS_, node, 'NFTS') + self.NFTS.append(NFTS_) + elif nodeName_ == 'Signature': + obj_ = SignatureType.factory(parent_object_=self) + obj_.build(child_) + self.Signature = obj_ + obj_.original_tagname_ = 'Signature' +# end class PedidoEnvioLoteNFTS + + +class CabecalhoType(GeneratedsSuper): + """Cabeçalho do pedido NFTS.Informe a Versão do Schema XML utilizado.""" + subclass = None + superclass = None + def __init__(self, Versao='1', id=None, Remetente=None, transacao=True, dtInicio=None, dtFim=None, QtdNFTS=None, ValorTotalServicos=None, ValorTotalDeducoes=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.Versao = _cast(None, Versao) + self.id = _cast(None, id) + self.Remetente = Remetente + self.transacao = transacao + if isinstance(dtInicio, BaseStrType_): + initvalue_ = datetime_.datetime.strptime(dtInicio, '%Y-%m-%d').date() + else: + initvalue_ = dtInicio + self.dtInicio = initvalue_ + if isinstance(dtFim, BaseStrType_): + initvalue_ = datetime_.datetime.strptime(dtFim, '%Y-%m-%d').date() + else: + initvalue_ = dtFim + self.dtFim = initvalue_ + self.QtdNFTS = QtdNFTS + self.validate_tpQuantidade(self.QtdNFTS) + self.ValorTotalServicos = ValorTotalServicos + self.validate_tpValor(self.ValorTotalServicos) + self.ValorTotalDeducoes = ValorTotalDeducoes + self.validate_tpValor(self.ValorTotalDeducoes) + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, CabecalhoType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if CabecalhoType.subclass: + return CabecalhoType.subclass(*args_, **kwargs_) + else: + return CabecalhoType(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_tpQuantidade(self, value): + # Validate type tpQuantidade, a restriction on xs:long. + pass + def validate_tpValor(self, value): + # Validate type tpValor, a restriction on xs:decimal. 
+ pass + def hasContent_(self): + if ( + self.Remetente is not None or + not self.transacao or + self.dtInicio is not None or + self.dtFim is not None or + self.QtdNFTS is not None or + self.ValorTotalServicos is not None or + self.ValorTotalDeducoes is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CabecalhoType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('CabecalhoType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CabecalhoType') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='CabecalhoType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='CabecalhoType'): + if self.Versao != 1 and 'Versao' not in already_processed: + already_processed.add('Versao') + outfile.write(' Versao=%s' % (quote_attrib(self.Versao), )) + if self.id is not None and 'id' not in already_processed: + already_processed.add('id') + outfile.write(' id=%s' % (quote_attrib(self.id), )) + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CabecalhoType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Remetente is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sRemetente>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Remetente), input_name='Remetente')), namespaceprefix_ , eol_)) + if not self.transacao: + showIndent(outfile, level, pretty_print) + outfile.write('<%stransacao>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.transacao, input_name='transacao'), namespaceprefix_ , eol_)) + if self.dtInicio is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sdtInicio>%s%s' % (namespaceprefix_ , self.gds_format_date(self.dtInicio, input_name='dtInicio'), namespaceprefix_ , eol_)) + if self.dtFim is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sdtFim>%s%s' % (namespaceprefix_ , self.gds_format_date(self.dtFim, input_name='dtFim'), namespaceprefix_ , eol_)) + if self.QtdNFTS is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sQtdNFTS>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.QtdNFTS, input_name='QtdNFTS'), namespaceprefix_ , eol_)) + if self.ValorTotalServicos is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sValorTotalServicos>%s%s' % (namespaceprefix_ , self.gds_format_float(self.ValorTotalServicos, input_name='ValorTotalServicos'), namespaceprefix_ , eol_)) + if self.ValorTotalDeducoes is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sValorTotalDeducoes>%s%s' % (namespaceprefix_ , self.gds_format_float(self.ValorTotalDeducoes, input_name='ValorTotalDeducoes'), namespaceprefix_ , eol_)) + def build(self, node): + 
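+        # Populate this CabecalhoType from an already-parsed element: attributes
+        # (Versao, id) are read first, then the child elements in document order.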
already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('Versao', node) + if value is not None and 'Versao' not in already_processed: + already_processed.add('Versao') + self.Versao = value + value = find_attr_value_('id', node) + if value is not None and 'id' not in already_processed: + already_processed.add('id') + self.id = value + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'Remetente': + Remetente_ = child_.text + Remetente_ = self.gds_validate_string(Remetente_, node, 'Remetente') + self.Remetente = Remetente_ + elif nodeName_ == 'transacao': + sval_ = child_.text + if sval_ in ('true', '1'): + ival_ = True + elif sval_ in ('false', '0'): + ival_ = False + else: + raise_parse_error(child_, 'requires boolean') + ival_ = self.gds_validate_boolean(ival_, node, 'transacao') + self.transacao = ival_ + elif nodeName_ == 'dtInicio': + sval_ = child_.text + dval_ = self.gds_parse_date(sval_) + self.dtInicio = dval_ + elif nodeName_ == 'dtFim': + sval_ = child_.text + dval_ = self.gds_parse_date(sval_) + self.dtFim = dval_ + elif nodeName_ == 'QtdNFTS' and child_.text: + sval_ = child_.text + try: + ival_ = int(sval_) + except (TypeError, ValueError) as exp: + raise_parse_error(child_, 'requires integer: %s' % exp) + ival_ = self.gds_validate_integer(ival_, node, 'QtdNFTS') + self.QtdNFTS = ival_ + # validate type tpQuantidade + self.validate_tpQuantidade(self.QtdNFTS) + elif nodeName_ == 'ValorTotalServicos' and child_.text: + sval_ = child_.text + try: + fval_ = float(sval_) + except (TypeError, ValueError) as exp: + raise_parse_error(child_, 'requires float or double: %s' % exp) + fval_ = self.gds_validate_float(fval_, node, 'ValorTotalServicos') + self.ValorTotalServicos = fval_ + # validate type tpValor + self.validate_tpValor(self.ValorTotalServicos) + elif nodeName_ == 'ValorTotalDeducoes' and child_.text: + sval_ = child_.text + try: + fval_ = float(sval_) + except (TypeError, ValueError) as exp: + raise_parse_error(child_, 'requires float or double: %s' % exp) + fval_ = self.gds_validate_float(fval_, node, 'ValorTotalDeducoes') + self.ValorTotalDeducoes = fval_ + # validate type tpValor + self.validate_tpValor(self.ValorTotalDeducoes) +# end class CabecalhoType + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + rootClass = GDSClassesMapping.get(tag) + if rootClass is None: + rootClass = globals().get(tag) + return tag, rootClass + + +def parse(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PedidoEnvioLoteNFTS' + rootClass = PedidoEnvioLoteNFTS + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. 
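+    # Dropping the reference below releases the parsed etree document; only
+    # the generated object tree (normally a PedidoEnvioLoteNFTS) is kept.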
+ doc = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='', + pretty_print=True) + return rootObj + + +def parseEtree(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PedidoEnvioLoteNFTS' + rootClass = PedidoEnvioLoteNFTS + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + doc = None + mapping = {} + rootElement = rootObj.to_etree(None, name_=rootTag, mapping_=mapping) + reverse_mapping = rootObj.gds_reverse_node_mapping(mapping) + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(content) + sys.stdout.write('\n') + return rootObj, rootElement, mapping, reverse_mapping + + +def parseString(inString, silence=False): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. + ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PedidoEnvioLoteNFTS' + rootClass = PedidoEnvioLoteNFTS + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + return rootObj + + +def parseLiteral(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PedidoEnvioLoteNFTS' + rootClass = PedidoEnvioLoteNFTS + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + doc = None + if not silence: + sys.stdout.write('#from PedidoEnvioLoteNFTS_v01 import *\n\n') + sys.stdout.write('import PedidoEnvioLoteNFTS_v01 as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + + +__all__ = [ + "CabecalhoType", + "PedidoEnvioLoteNFTS" +] diff --git a/nfselib/paulistana/v02/PedidoEnvioLoteRPS_v01.py b/nfselib/paulistana/v02/PedidoEnvioLoteRPS_v01.py new file mode 100644 index 0000000..9688e8a --- /dev/null +++ b/nfselib/paulistana/v02/PedidoEnvioLoteRPS_v01.py @@ -0,0 +1,1128 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Jul 22 06:40:29 2020 by generateDS.py version 2.30.15. 
+# Python 3.8.3 (default, May 17 2020, 18:15:42) [GCC 10.1.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('--no-collect-includes', '') +# ('--use-getter-setter', 'none') +# ('-f', '') +# ('-o', './paulistanalib/v02/PedidoEnvioLoteRPS_v01.py') +# +# Command line arguments: +# ./process_includes/PedidoEnvioLoteRPS_v01.xsd +# +# Command line: +# /home/mileo/Projects/oca12/bin/erpbrasil-edoc-gen-generate-python --no-namespace-defs --no-collect-includes --use-getter-setter="none" -f -o "./paulistanalib/v02/PedidoEnvioLoteRPS_v01.py" ./process_includes/PedidoEnvioLoteRPS_v01.xsd +# +# Current working directory (os.getcwd()): +# schemas +# + +import os +import sys +import re as re_ +import base64 +import datetime as datetime_ +import warnings as warnings_ +try: + from lxml import etree as etree_ +except ImportError: + from xml.etree import ElementTree as etree_ + + +Validate_simpletypes_ = True +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ImportError: + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ImportError: + GenerateDSNamespaceTypePrefixes_ = {} + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ImportError as exp: + + class GeneratedsSuper(object): + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data) + def gds_validate_base64(self, input_data, node=None, input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % input_data + def gds_validate_integer(self, input_data, node=None, input_name=''): + return input_data + def gds_format_integer_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integers') + return values + def gds_format_float(self, input_data, input_name=''): + return ('%.15f' % input_data).rstrip('0') + def gds_validate_float(self, input_data, node=None, input_name=''): + return input_data + def gds_format_float_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of floats') + return values + def gds_format_double(self, input_data, input_name=''): + return '%e' % input_data + def gds_validate_double(self, input_data, node=None, input_name=''): + return input_data + def gds_format_double_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of doubles') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_validate_boolean(self, input_data, node=None, input_name=''): + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, 
input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + if value not in ('true', '1', 'false', '0', ): + raise_parse_error( + node, + 'Requires sequence of booleans ' + '("true", "1", "false", "0")') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' 
+ time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. 
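+            # More precisely: the target must fully match at least one pattern
+            # from *each* inner list.  Example: patterns = [[r'\d{11}', r'\d{14}']]
+            # accepts an 11- or 14-digit target and rejects anything else.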
+ found1 = True + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + return None + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + if type(self) != type(other): + return False + return self.__dict__ == other.__dict__ + def __ne__(self, other): + return not self.__eq__(other) + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. 
+# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. +# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(' ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." + if not inStr: + return '' + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s2 = '' + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos:mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start():mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + return s1 + + +def quote_attrib(inStr): + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s1 = s1.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', """) + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find('\n') == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find('\n') == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = '' + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(':') + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get('{%s}%s' % (namespace, name, )) + return value + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + def getCategory(self): + return self.category + def getContenttype(self, content_type): + return self.content_type + def getValue(self): + return self.value + def getName(self): + return self.name + def export(self, outfile, level, name, namespace, + pretty_print=True): + if self.category == 
MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def set_data_type(self, data_type): 
self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + +# +# Data representation classes. +# + + +class PedidoEnvioLoteRPS(GeneratedsSuper): + """Schema utilizado para PEDIDO de envio de lote de RPS.Este Schema XML + é utilizado pelos prestadores de serviços para substituição em + lote de RPS por NFS-e.""" + subclass = None + superclass = None + def __init__(self, Cabecalho=None, RPS=None, Signature=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.Cabecalho = Cabecalho + if RPS is None: + self.RPS = [] + else: + self.RPS = RPS + self.Signature = Signature + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, PedidoEnvioLoteRPS) + if subclass is not None: + return subclass(*args_, **kwargs_) + if PedidoEnvioLoteRPS.subclass: + return PedidoEnvioLoteRPS.subclass(*args_, **kwargs_) + else: + return PedidoEnvioLoteRPS(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): + if ( + self.Cabecalho is not None or + self.RPS or + self.Signature is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PedidoEnvioLoteRPS', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('PedidoEnvioLoteRPS') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PedidoEnvioLoteRPS') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='PedidoEnvioLoteRPS', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PedidoEnvioLoteRPS'): + pass + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PedidoEnvioLoteRPS', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Cabecalho is not None: + self.Cabecalho.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Cabecalho', pretty_print=pretty_print) + for RPS_ in self.RPS: + showIndent(outfile, level, pretty_print) + outfile.write('<%sRPS>%s%s' % (namespaceprefix_ , 
self.gds_encode(self.gds_format_string(quote_xml(RPS_), input_name='RPS')), namespaceprefix_ , eol_)) + if self.Signature is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sSignature>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Signature), input_name='Signature')), namespaceprefix_ , eol_)) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + pass + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'Cabecalho': + obj_ = CabecalhoType.factory(parent_object_=self) + obj_.build(child_) + self.Cabecalho = obj_ + obj_.original_tagname_ = 'Cabecalho' + elif nodeName_ == 'RPS': + RPS_ = child_.text + RPS_ = self.gds_validate_string(RPS_, node, 'RPS') + self.RPS.append(RPS_) + elif nodeName_ == 'Signature': + Signature_ = child_.text + Signature_ = self.gds_validate_string(Signature_, node, 'Signature') + self.Signature = Signature_ +# end class PedidoEnvioLoteRPS + + +class CabecalhoType(GeneratedsSuper): + """Cabeçalho do pedido.Informe a Versão do Schema XML utilizado.""" + subclass = None + superclass = None + def __init__(self, Versao='1', CPFCNPJRemetente=None, transacao=True, dtInicio=None, dtFim=None, QtdRPS=None, ValorTotalServicos=None, ValorTotalDeducoes=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.Versao = _cast(None, Versao) + self.CPFCNPJRemetente = CPFCNPJRemetente + self.transacao = transacao + if isinstance(dtInicio, BaseStrType_): + initvalue_ = datetime_.datetime.strptime(dtInicio, '%Y-%m-%d').date() + else: + initvalue_ = dtInicio + self.dtInicio = initvalue_ + if isinstance(dtFim, BaseStrType_): + initvalue_ = datetime_.datetime.strptime(dtFim, '%Y-%m-%d').date() + else: + initvalue_ = dtFim + self.dtFim = initvalue_ + self.QtdRPS = QtdRPS + self.ValorTotalServicos = ValorTotalServicos + self.ValorTotalDeducoes = ValorTotalDeducoes + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, CabecalhoType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if CabecalhoType.subclass: + return CabecalhoType.subclass(*args_, **kwargs_) + else: + return CabecalhoType(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): + if ( + self.CPFCNPJRemetente is not None or + not self.transacao or + self.dtInicio is not None or + self.dtFim is not None or + self.QtdRPS is not None or + self.ValorTotalServicos is not None or + self.ValorTotalDeducoes is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CabecalhoType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('CabecalhoType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CabecalhoType') + if 
self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='CabecalhoType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='CabecalhoType'): + if self.Versao != "1" and 'Versao' not in already_processed: + already_processed.add('Versao') + outfile.write(' Versao=%s' % (quote_attrib(self.Versao), )) + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CabecalhoType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.CPFCNPJRemetente is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sCPFCNPJRemetente>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CPFCNPJRemetente), input_name='CPFCNPJRemetente')), namespaceprefix_ , eol_)) + if not self.transacao: + showIndent(outfile, level, pretty_print) + outfile.write('<%stransacao>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.transacao, input_name='transacao'), namespaceprefix_ , eol_)) + if self.dtInicio is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sdtInicio>%s%s' % (namespaceprefix_ , self.gds_format_date(self.dtInicio, input_name='dtInicio'), namespaceprefix_ , eol_)) + if self.dtFim is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sdtFim>%s%s' % (namespaceprefix_ , self.gds_format_date(self.dtFim, input_name='dtFim'), namespaceprefix_ , eol_)) + if self.QtdRPS is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sQtdRPS>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.QtdRPS), input_name='QtdRPS')), namespaceprefix_ , eol_)) + if self.ValorTotalServicos is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sValorTotalServicos>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ValorTotalServicos), input_name='ValorTotalServicos')), namespaceprefix_ , eol_)) + if self.ValorTotalDeducoes is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sValorTotalDeducoes>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ValorTotalDeducoes), input_name='ValorTotalDeducoes')), namespaceprefix_ , eol_)) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('Versao', node) + if value is not None and 'Versao' not in already_processed: + already_processed.add('Versao') + self.Versao = value + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'CPFCNPJRemetente': + CPFCNPJRemetente_ = child_.text + CPFCNPJRemetente_ = self.gds_validate_string(CPFCNPJRemetente_, node, 'CPFCNPJRemetente') + self.CPFCNPJRemetente = CPFCNPJRemetente_ + elif nodeName_ == 'transacao': + sval_ = child_.text + if sval_ in ('true', '1'): + ival_ = True + elif sval_ in ('false', '0'): + ival_ = False + else: + raise_parse_error(child_, 'requires boolean') + ival_ = self.gds_validate_boolean(ival_, node, 'transacao') + 
self.transacao = ival_ + elif nodeName_ == 'dtInicio': + sval_ = child_.text + dval_ = self.gds_parse_date(sval_) + self.dtInicio = dval_ + elif nodeName_ == 'dtFim': + sval_ = child_.text + dval_ = self.gds_parse_date(sval_) + self.dtFim = dval_ + elif nodeName_ == 'QtdRPS': + QtdRPS_ = child_.text + QtdRPS_ = self.gds_validate_string(QtdRPS_, node, 'QtdRPS') + self.QtdRPS = QtdRPS_ + elif nodeName_ == 'ValorTotalServicos': + ValorTotalServicos_ = child_.text + ValorTotalServicos_ = self.gds_validate_string(ValorTotalServicos_, node, 'ValorTotalServicos') + self.ValorTotalServicos = ValorTotalServicos_ + elif nodeName_ == 'ValorTotalDeducoes': + ValorTotalDeducoes_ = child_.text + ValorTotalDeducoes_ = self.gds_validate_string(ValorTotalDeducoes_, node, 'ValorTotalDeducoes') + self.ValorTotalDeducoes = ValorTotalDeducoes_ +# end class CabecalhoType + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + rootClass = GDSClassesMapping.get(tag) + if rootClass is None: + rootClass = globals().get(tag) + return tag, rootClass + + +def parse(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PedidoEnvioLoteRPS' + rootClass = PedidoEnvioLoteRPS + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + doc = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='', + pretty_print=True) + return rootObj + + +def parseEtree(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PedidoEnvioLoteRPS' + rootClass = PedidoEnvioLoteRPS + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + doc = None + mapping = {} + rootElement = rootObj.to_etree(None, name_=rootTag, mapping_=mapping) + reverse_mapping = rootObj.gds_reverse_node_mapping(mapping) + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(content) + sys.stdout.write('\n') + return rootObj, rootElement, mapping, reverse_mapping + + +def parseString(inString, silence=False): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. + ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PedidoEnvioLoteRPS' + rootClass = PedidoEnvioLoteRPS + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. 
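+    # rootObj is now a fully built object tree; unrecognised root tags fall
+    # back to PedidoEnvioLoteRPS.  Callers embedding this module typically
+    # pass silence=True and use the returned object instead of the stdout dump.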
+ if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + return rootObj + + +def parseLiteral(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PedidoEnvioLoteRPS' + rootClass = PedidoEnvioLoteRPS + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + doc = None + if not silence: + sys.stdout.write('#from PedidoEnvioLoteRPS_v01 import *\n\n') + sys.stdout.write('import PedidoEnvioLoteRPS_v01 as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + + +__all__ = [ + "CabecalhoType", + "PedidoEnvioLoteRPS" +] diff --git a/nfselib/paulistana/v02/PedidoEnvioNFTS_v01.py b/nfselib/paulistana/v02/PedidoEnvioNFTS_v01.py new file mode 100644 index 0000000..f149cdb --- /dev/null +++ b/nfselib/paulistana/v02/PedidoEnvioNFTS_v01.py @@ -0,0 +1,1084 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Jul 22 06:40:30 2020 by generateDS.py version 2.30.15. +# Python 3.8.3 (default, May 17 2020, 18:15:42) [GCC 10.1.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('--no-collect-includes', '') +# ('--use-getter-setter', 'none') +# ('-f', '') +# ('-o', './paulistanalib/v02/PedidoEnvioNFTS_v01.py') +# +# Command line arguments: +# ./process_includes/PedidoEnvioNFTS_v01.xsd +# +# Command line: +# /home/mileo/Projects/oca12/bin/erpbrasil-edoc-gen-generate-python --no-namespace-defs --no-collect-includes --use-getter-setter="none" -f -o "./paulistanalib/v02/PedidoEnvioNFTS_v01.py" ./process_includes/PedidoEnvioNFTS_v01.xsd +# +# Current working directory (os.getcwd()): +# schemas +# + +import os +import sys +import re as re_ +import base64 +import datetime as datetime_ +import warnings as warnings_ +try: + from lxml import etree as etree_ +except ImportError: + from xml.etree import ElementTree as etree_ + + +Validate_simpletypes_ = True +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. 
The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. +# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ImportError: + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ImportError: + GenerateDSNamespaceTypePrefixes_ = {} + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ImportError as exp: + + class GeneratedsSuper(object): + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data) + def gds_validate_base64(self, input_data, node=None, input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % input_data + def gds_validate_integer(self, input_data, node=None, input_name=''): + return input_data + def gds_format_integer_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integers') + return values + def gds_format_float(self, input_data, input_name=''): + return ('%.15f' % input_data).rstrip('0') + def gds_validate_float(self, input_data, node=None, input_name=''): + return input_data + def gds_format_float_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of floats') + return values + def 
gds_format_double(self, input_data, input_name=''): + return '%e' % input_data + def gds_validate_double(self, input_data, node=None, input_name=''): + return input_data + def gds_format_double_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of doubles') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_validate_boolean(self, input_data, node=None, input_name=''): + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + if value not in ('true', '1', 'false', '0', ): + raise_parse_error( + node, + 'Requires sequence of booleans ' + '("true", "1", "false", "0")') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' 
+ time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. 
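+            # The length comparison below makes re_.search behave like a full
+            # match (as re.fullmatch would): a pattern only counts when it
+            # consumes the entire target string.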
+ found1 = True + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + return None + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + if type(self) != type(other): + return False + return self.__dict__ == other.__dict__ + def __ne__(self, other): + return not self.__eq__(other) + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. 
+# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. +# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(' ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." + if not inStr: + return '' + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s2 = '' + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos:mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start():mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + return s1 + + +def quote_attrib(inStr): + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s1 = s1.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', """) + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find('\n') == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find('\n') == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = '' + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(':') + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get('{%s}%s' % (namespace, name, )) + return value + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + def getCategory(self): + return self.category + def getContenttype(self, content_type): + return self.content_type + def getValue(self): + return self.value + def getName(self): + return self.name + def export(self, outfile, level, name, namespace, + pretty_print=True): + if self.category == 
MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def set_data_type(self, data_type): 
self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + +# +# Data representation classes. +# + + +class tpOpcaoSimples(object): + _0='0' + _1='1' + _2='2' + _3='3' + + +class tpStatusNFe(object): + N='N' + C='C' + E='E' + + +class tpTipoRPS(object): + RPS='RPS' + RPSM='RPS-M' + RPSC='RPS-C' + + +class PedidoEnvioNFTS(GeneratedsSuper): + """Schema utilizado para PEDIDO de envio de NFTS.Este Schema XML é + utilizado pelos tomadores/intermediários de serviços para + emissão de NFTS.""" + subclass = None + superclass = None + def __init__(self, Cabecalho=None, NFTS=None, Signature=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.Cabecalho = Cabecalho + self.NFTS = NFTS + self.Signature = Signature + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, PedidoEnvioNFTS) + if subclass is not None: + return subclass(*args_, **kwargs_) + if PedidoEnvioNFTS.subclass: + return PedidoEnvioNFTS.subclass(*args_, **kwargs_) + else: + return PedidoEnvioNFTS(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): + if ( + self.Cabecalho is not None or + self.NFTS is not None or + self.Signature is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PedidoEnvioNFTS', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('PedidoEnvioNFTS') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PedidoEnvioNFTS') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='PedidoEnvioNFTS', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PedidoEnvioNFTS'): + pass + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PedidoEnvioNFTS', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Cabecalho is not None: + self.Cabecalho.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Cabecalho', pretty_print=pretty_print) + if self.NFTS is 
not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sNFTS>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.NFTS), input_name='NFTS')), namespaceprefix_ , eol_)) + if self.Signature is not None: + self.Signature.export(outfile, level, namespaceprefix_='ds:', namespacedef_='', name_='Signature', pretty_print=pretty_print) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + pass + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'Cabecalho': + obj_ = CabecalhoType.factory(parent_object_=self) + obj_.build(child_) + self.Cabecalho = obj_ + obj_.original_tagname_ = 'Cabecalho' + elif nodeName_ == 'NFTS': + NFTS_ = child_.text + NFTS_ = self.gds_validate_string(NFTS_, node, 'NFTS') + self.NFTS = NFTS_ + elif nodeName_ == 'Signature': + obj_ = SignatureType.factory(parent_object_=self) + obj_.build(child_) + self.Signature = obj_ + obj_.original_tagname_ = 'Signature' +# end class PedidoEnvioNFTS + + +class CabecalhoType(GeneratedsSuper): + """Cabeçalho do pedido NFTS.Informe a Versão do Schema XML utilizado.""" + subclass = None + superclass = None + def __init__(self, Versao=None, id=None, Remetente=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.Versao = _cast(None, Versao) + self.id = _cast(None, id) + self.Remetente = Remetente + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, CabecalhoType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if CabecalhoType.subclass: + return CabecalhoType.subclass(*args_, **kwargs_) + else: + return CabecalhoType(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): + if ( + self.Remetente is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CabecalhoType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('CabecalhoType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CabecalhoType') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='CabecalhoType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='CabecalhoType'): + if self.Versao is not None and 'Versao' not in already_processed: + already_processed.add('Versao') + outfile.write(' Versao=%s' % (quote_attrib(self.Versao), )) + if self.id is not None and 'id' not in already_processed: + already_processed.add('id') + outfile.write(' id=%s' % (quote_attrib(self.id), )) + def 
exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CabecalhoType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Remetente is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sRemetente>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Remetente), input_name='Remetente')), namespaceprefix_ , eol_)) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('Versao', node) + if value is not None and 'Versao' not in already_processed: + already_processed.add('Versao') + self.Versao = value + value = find_attr_value_('id', node) + if value is not None and 'id' not in already_processed: + already_processed.add('id') + self.id = value + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'Remetente': + Remetente_ = child_.text + Remetente_ = self.gds_validate_string(Remetente_, node, 'Remetente') + self.Remetente = Remetente_ +# end class CabecalhoType + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + rootClass = GDSClassesMapping.get(tag) + if rootClass is None: + rootClass = globals().get(tag) + return tag, rootClass + + +def parse(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PedidoEnvioNFTS' + rootClass = PedidoEnvioNFTS + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + doc = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='', + pretty_print=True) + return rootObj + + +def parseEtree(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PedidoEnvioNFTS' + rootClass = PedidoEnvioNFTS + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + doc = None + mapping = {} + rootElement = rootObj.to_etree(None, name_=rootTag, mapping_=mapping) + reverse_mapping = rootObj.gds_reverse_node_mapping(mapping) + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(content) + sys.stdout.write('\n') + return rootObj, rootElement, mapping, reverse_mapping + + +def parseString(inString, silence=False): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. 
+ ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PedidoEnvioNFTS' + rootClass = PedidoEnvioNFTS + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + return rootObj + + +def parseLiteral(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PedidoEnvioNFTS' + rootClass = PedidoEnvioNFTS + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + doc = None + if not silence: + sys.stdout.write('#from PedidoEnvioNFTS_v01 import *\n\n') + sys.stdout.write('import PedidoEnvioNFTS_v01 as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + + +__all__ = [ + "CabecalhoType", + "PedidoEnvioNFTS" +] diff --git a/nfselib/paulistana/v02/PedidoEnvioRPS_v01.py b/nfselib/paulistana/v02/PedidoEnvioRPS_v01.py new file mode 100644 index 0000000..bc71937 --- /dev/null +++ b/nfselib/paulistana/v02/PedidoEnvioRPS_v01.py @@ -0,0 +1,1057 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Jul 22 06:40:29 2020 by generateDS.py version 2.30.15. +# Python 3.8.3 (default, May 17 2020, 18:15:42) [GCC 10.1.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('--no-collect-includes', '') +# ('--use-getter-setter', 'none') +# ('-f', '') +# ('-o', './paulistanalib/v02/PedidoEnvioRPS_v01.py') +# +# Command line arguments: +# ./process_includes/PedidoEnvioRPS_v01.xsd +# +# Command line: +# /home/mileo/Projects/oca12/bin/erpbrasil-edoc-gen-generate-python --no-namespace-defs --no-collect-includes --use-getter-setter="none" -f -o "./paulistanalib/v02/PedidoEnvioRPS_v01.py" ./process_includes/PedidoEnvioRPS_v01.xsd +# +# Current working directory (os.getcwd()): +# schemas +# + +import os +import sys +import re as re_ +import base64 +import datetime as datetime_ +import warnings as warnings_ +try: + from lxml import etree as etree_ +except ImportError: + from xml.etree import ElementTree as etree_ + + +Validate_simpletypes_ = True +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. 
+ try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. +# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ImportError: + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ImportError: + GenerateDSNamespaceTypePrefixes_ = {} + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. 
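A hedged sketch of that override hook (illustrative only; the method bodies below are hypothetical, and a real replacement normally starts from a copy of the fallback class defined right after this point, because the import below swaps in the whole class):

# generatedssuper.py -- illustrative override module, not shipped by this patch
class GeneratedsSuper(object):
    def gds_format_string(self, input_data, input_name=''):
        # example tweak: strip stray whitespace before the value is exported
        return input_data.strip() if isinstance(input_data, str) else input_data
    def gds_validate_string(self, input_data, node=None, input_name=''):
        return input_data or ''
    # every other gds_* helper the generated classes call must also be
    # provided here (copy the remaining methods from the fallback class)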
+ +try: + from generatedssuper import GeneratedsSuper +except ImportError as exp: + + class GeneratedsSuper(object): + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data) + def gds_validate_base64(self, input_data, node=None, input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % input_data + def gds_validate_integer(self, input_data, node=None, input_name=''): + return input_data + def gds_format_integer_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integers') + return values + def gds_format_float(self, input_data, input_name=''): + return ('%.15f' % input_data).rstrip('0') + def gds_validate_float(self, input_data, node=None, input_name=''): + return input_data + def gds_format_float_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of floats') + return values + def gds_format_double(self, input_data, input_name=''): + return '%e' % input_data + def gds_validate_double(self, input_data, node=None, input_name=''): + return input_data + def gds_format_double_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of doubles') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_validate_boolean(self, input_data, node=None, input_name=''): + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + if value not in ('true', '1', 'false', '0', ): + raise_parse_error( + node, + 'Requires sequence of booleans ' + '("true", "1", "false", "0")') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + 
input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + 
total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + return None + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + if type(self) != type(other): + return False + return self.__dict__ == other.__dict__ + def __ne__(self, other): + return not self.__eq__(other) + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. 
+# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. +# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(' ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." + if not inStr: + return '' + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s2 = '' + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos:mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start():mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + return s1 + + +def quote_attrib(inStr): + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s1 = s1.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', """) + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find('\n') == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find('\n') == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = '' + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(':') + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get('{%s}%s' % (namespace, name, )) + return value + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + def getCategory(self): + return self.category + def getContenttype(self, content_type): + return self.content_type + def getValue(self): + return self.value + def getName(self): + return self.name + def export(self, outfile, level, name, namespace, + pretty_print=True): + if self.category == 
MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def set_data_type(self, data_type): 
self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + +# +# Data representation classes. +# + + +class PedidoEnvioRPS(GeneratedsSuper): + """Schema utilizado para PEDIDO de envio de RPS.Este Schema XML é + utilizado pelos prestadores de serviços para substituição online + e individual de RPS por NFS-e.""" + subclass = None + superclass = None + def __init__(self, Cabecalho=None, RPS=None, Signature=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.Cabecalho = Cabecalho + self.RPS = RPS + self.Signature = Signature + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, PedidoEnvioRPS) + if subclass is not None: + return subclass(*args_, **kwargs_) + if PedidoEnvioRPS.subclass: + return PedidoEnvioRPS.subclass(*args_, **kwargs_) + else: + return PedidoEnvioRPS(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): + if ( + self.Cabecalho is not None or + self.RPS is not None or + self.Signature is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PedidoEnvioRPS', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('PedidoEnvioRPS') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PedidoEnvioRPS') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='PedidoEnvioRPS', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PedidoEnvioRPS'): + pass + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PedidoEnvioRPS', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Cabecalho is not None: + self.Cabecalho.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Cabecalho', pretty_print=pretty_print) + if self.RPS is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sRPS>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.RPS), input_name='RPS')), 
namespaceprefix_ , eol_)) + if self.Signature is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sSignature>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Signature), input_name='Signature')), namespaceprefix_ , eol_)) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + pass + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'Cabecalho': + obj_ = CabecalhoType.factory(parent_object_=self) + obj_.build(child_) + self.Cabecalho = obj_ + obj_.original_tagname_ = 'Cabecalho' + elif nodeName_ == 'RPS': + RPS_ = child_.text + RPS_ = self.gds_validate_string(RPS_, node, 'RPS') + self.RPS = RPS_ + elif nodeName_ == 'Signature': + Signature_ = child_.text + Signature_ = self.gds_validate_string(Signature_, node, 'Signature') + self.Signature = Signature_ +# end class PedidoEnvioRPS + + +class CabecalhoType(GeneratedsSuper): + """Cabeçalho do pedido.Informe a Versão do Schema XML utilizado.""" + subclass = None + superclass = None + def __init__(self, Versao='1', CPFCNPJRemetente=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.Versao = _cast(None, Versao) + self.CPFCNPJRemetente = CPFCNPJRemetente + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, CabecalhoType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if CabecalhoType.subclass: + return CabecalhoType.subclass(*args_, **kwargs_) + else: + return CabecalhoType(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): + if ( + self.CPFCNPJRemetente is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CabecalhoType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('CabecalhoType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CabecalhoType') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='CabecalhoType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='CabecalhoType'): + if self.Versao != "1" and 'Versao' not in already_processed: + already_processed.add('Versao') + outfile.write(' Versao=%s' % (quote_attrib(self.Versao), )) + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CabecalhoType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.CPFCNPJRemetente is not None: + showIndent(outfile, level, pretty_print) + 
outfile.write('<%sCPFCNPJRemetente>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CPFCNPJRemetente), input_name='CPFCNPJRemetente')), namespaceprefix_ , eol_)) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('Versao', node) + if value is not None and 'Versao' not in already_processed: + already_processed.add('Versao') + self.Versao = value + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'CPFCNPJRemetente': + CPFCNPJRemetente_ = child_.text + CPFCNPJRemetente_ = self.gds_validate_string(CPFCNPJRemetente_, node, 'CPFCNPJRemetente') + self.CPFCNPJRemetente = CPFCNPJRemetente_ +# end class CabecalhoType + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + rootClass = GDSClassesMapping.get(tag) + if rootClass is None: + rootClass = globals().get(tag) + return tag, rootClass + + +def parse(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PedidoEnvioRPS' + rootClass = PedidoEnvioRPS + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + doc = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='', + pretty_print=True) + return rootObj + + +def parseEtree(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PedidoEnvioRPS' + rootClass = PedidoEnvioRPS + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + doc = None + mapping = {} + rootElement = rootObj.to_etree(None, name_=rootTag, mapping_=mapping) + reverse_mapping = rootObj.gds_reverse_node_mapping(mapping) + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(content) + sys.stdout.write('\n') + return rootObj, rootElement, mapping, reverse_mapping + + +def parseString(inString, silence=False): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. + ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PedidoEnvioRPS' + rootClass = PedidoEnvioRPS + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. 
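+    # Illustrative call (hypothetical input, not generated code):
+    #     rps = parseString(b'...', silence=True)
+    #     remetente = rps.Cabecalho.CPFCNPJRemetente if rps.Cabecalho else None
+    # As the docstring notes, the string must not begin with an XML
+    # declaration that carries an encoding.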
+ if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + return rootObj + + +def parseLiteral(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PedidoEnvioRPS' + rootClass = PedidoEnvioRPS + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + doc = None + if not silence: + sys.stdout.write('#from PedidoEnvioRPS_v01 import *\n\n') + sys.stdout.write('import PedidoEnvioRPS_v01 as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + + +__all__ = [ + "CabecalhoType", + "PedidoEnvioRPS" +] diff --git a/nfselib/paulistana/v02/PedidoInformacoesLote_v01.py b/nfselib/paulistana/v02/PedidoInformacoesLote_v01.py new file mode 100644 index 0000000..a16b197 --- /dev/null +++ b/nfselib/paulistana/v02/PedidoInformacoesLote_v01.py @@ -0,0 +1,1066 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Jul 22 06:40:30 2020 by generateDS.py version 2.30.15. +# Python 3.8.3 (default, May 17 2020, 18:15:42) [GCC 10.1.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('--no-collect-includes', '') +# ('--use-getter-setter', 'none') +# ('-f', '') +# ('-o', './paulistanalib/v02/PedidoInformacoesLote_v01.py') +# +# Command line arguments: +# ./process_includes/PedidoInformacoesLote_v01.xsd +# +# Command line: +# /home/mileo/Projects/oca12/bin/erpbrasil-edoc-gen-generate-python --no-namespace-defs --no-collect-includes --use-getter-setter="none" -f -o "./paulistanalib/v02/PedidoInformacoesLote_v01.py" ./process_includes/PedidoInformacoesLote_v01.xsd +# +# Current working directory (os.getcwd()): +# schemas +# + +import os +import sys +import re as re_ +import base64 +import datetime as datetime_ +import warnings as warnings_ +try: + from lxml import etree as etree_ +except ImportError: + from xml.etree import ElementTree as etree_ + + +Validate_simpletypes_ = True +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. 
This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. +# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ImportError: + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ImportError: + GenerateDSNamespaceTypePrefixes_ = {} + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ImportError as exp: + + class GeneratedsSuper(object): + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data) + def gds_validate_base64(self, input_data, node=None, input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % input_data + def gds_validate_integer(self, input_data, node=None, input_name=''): + return input_data + def gds_format_integer_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integers') + return values + def gds_format_float(self, input_data, input_name=''): + return ('%.15f' % input_data).rstrip('0') + def gds_validate_float(self, input_data, node=None, input_name=''): + return input_data + def gds_format_float_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except 
(TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of floats') + return values + def gds_format_double(self, input_data, input_name=''): + return '%e' % input_data + def gds_validate_double(self, input_data, node=None, input_name=''): + return input_data + def gds_format_double_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of doubles') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_validate_boolean(self, input_data, node=None, input_name=''): + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + if value not in ('true', '1', 'false', '0', ): + raise_parse_error( + node, + 'Requires sequence of booleans ' + '("true", "1", "false", "0")') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' 
+ time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. 
+ found1 = True + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + return None + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + if type(self) != type(other): + return False + return self.__dict__ == other.__dict__ + def __ne__(self, other): + return not self.__eq__(other) + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. 
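+#
+# Illustrative usage sketch (editorial addition, not generateDS.py output):
+# a minimal, hedged example of how the classes generated later in this
+# module might be used to assemble a "PedidoInformacoesLote" request and
+# serialize it to XML. The helper name and all field values are placeholders
+# for documentation only; only the generated constructors and export() are
+# assumed, as defined below in this file.
+def _exemplo_pedido_informacoes_lote():
+    import io
+    # CabecalhoType carries the remitter CPF/CNPJ, the lot number and the
+    # provider municipal registration, as declared by the schema.
+    cabecalho = CabecalhoType(
+        Versao='1',
+        CPFCNPJRemetente='99999999000191',   # placeholder CNPJ
+        NumeroLote='1',                      # placeholder lot number
+        InscricaoPrestador='12345678',       # placeholder CCM
+    )
+    pedido = PedidoInformacoesLote(Cabecalho=cabecalho)
+    buf = io.StringIO()
+    # export() writes the XML representation of the object tree into buf.
+    pedido.export(buf, 0, name_='PedidoInformacoesLote', pretty_print=True)
+    return buf.getvalue()
+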
+# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. +# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(' ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." + if not inStr: + return '' + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s2 = '' + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos:mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start():mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + return s1 + + +def quote_attrib(inStr): + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s1 = s1.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', """) + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find('\n') == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find('\n') == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = '' + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(':') + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get('{%s}%s' % (namespace, name, )) + return value + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + def getCategory(self): + return self.category + def getContenttype(self, content_type): + return self.content_type + def getValue(self): + return self.value + def getName(self): + return self.name + def export(self, outfile, level, name, namespace, + pretty_print=True): + if self.category == 
MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def set_data_type(self, data_type): 
self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + +# +# Data representation classes. +# + + +class PedidoInformacoesLote(GeneratedsSuper): + """Schema utilizado para PEDIDO de informações de lote.Este Schema XML + é utilizado pelos prestadores de serviços para obterem + informações de lotes de RPS que geraram NFS-e.""" + subclass = None + superclass = None + def __init__(self, Cabecalho=None, Signature=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.Cabecalho = Cabecalho + self.Signature = Signature + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, PedidoInformacoesLote) + if subclass is not None: + return subclass(*args_, **kwargs_) + if PedidoInformacoesLote.subclass: + return PedidoInformacoesLote.subclass(*args_, **kwargs_) + else: + return PedidoInformacoesLote(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): + if ( + self.Cabecalho is not None or + self.Signature is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PedidoInformacoesLote', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('PedidoInformacoesLote') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PedidoInformacoesLote') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='PedidoInformacoesLote', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PedidoInformacoesLote'): + pass + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PedidoInformacoesLote', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Cabecalho is not None: + self.Cabecalho.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Cabecalho', pretty_print=pretty_print) + if self.Signature is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sSignature>%s%s' % (namespaceprefix_ , 
self.gds_encode(self.gds_format_string(quote_xml(self.Signature), input_name='Signature')), namespaceprefix_ , eol_)) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + pass + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'Cabecalho': + obj_ = CabecalhoType.factory(parent_object_=self) + obj_.build(child_) + self.Cabecalho = obj_ + obj_.original_tagname_ = 'Cabecalho' + elif nodeName_ == 'Signature': + Signature_ = child_.text + Signature_ = self.gds_validate_string(Signature_, node, 'Signature') + self.Signature = Signature_ +# end class PedidoInformacoesLote + + +class CabecalhoType(GeneratedsSuper): + """Cabeçalho do pedido.Informe a Versão do Schema XML utilizado.""" + subclass = None + superclass = None + def __init__(self, Versao='1', CPFCNPJRemetente=None, NumeroLote=None, InscricaoPrestador=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.Versao = _cast(None, Versao) + self.CPFCNPJRemetente = CPFCNPJRemetente + self.NumeroLote = NumeroLote + self.InscricaoPrestador = InscricaoPrestador + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, CabecalhoType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if CabecalhoType.subclass: + return CabecalhoType.subclass(*args_, **kwargs_) + else: + return CabecalhoType(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): + if ( + self.CPFCNPJRemetente is not None or + self.NumeroLote is not None or + self.InscricaoPrestador is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CabecalhoType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('CabecalhoType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CabecalhoType') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='CabecalhoType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='CabecalhoType'): + if self.Versao != "1" and 'Versao' not in already_processed: + already_processed.add('Versao') + outfile.write(' Versao=%s' % (quote_attrib(self.Versao), )) + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CabecalhoType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.CPFCNPJRemetente is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sCPFCNPJRemetente>%s%s' % (namespaceprefix_ , 
self.gds_encode(self.gds_format_string(quote_xml(self.CPFCNPJRemetente), input_name='CPFCNPJRemetente')), namespaceprefix_ , eol_)) + if self.NumeroLote is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sNumeroLote>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.NumeroLote), input_name='NumeroLote')), namespaceprefix_ , eol_)) + if self.InscricaoPrestador is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sInscricaoPrestador>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.InscricaoPrestador), input_name='InscricaoPrestador')), namespaceprefix_ , eol_)) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('Versao', node) + if value is not None and 'Versao' not in already_processed: + already_processed.add('Versao') + self.Versao = value + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'CPFCNPJRemetente': + CPFCNPJRemetente_ = child_.text + CPFCNPJRemetente_ = self.gds_validate_string(CPFCNPJRemetente_, node, 'CPFCNPJRemetente') + self.CPFCNPJRemetente = CPFCNPJRemetente_ + elif nodeName_ == 'NumeroLote': + NumeroLote_ = child_.text + NumeroLote_ = self.gds_validate_string(NumeroLote_, node, 'NumeroLote') + self.NumeroLote = NumeroLote_ + elif nodeName_ == 'InscricaoPrestador': + InscricaoPrestador_ = child_.text + InscricaoPrestador_ = self.gds_validate_string(InscricaoPrestador_, node, 'InscricaoPrestador') + self.InscricaoPrestador = InscricaoPrestador_ +# end class CabecalhoType + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + rootClass = GDSClassesMapping.get(tag) + if rootClass is None: + rootClass = globals().get(tag) + return tag, rootClass + + +def parse(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PedidoInformacoesLote' + rootClass = PedidoInformacoesLote + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + doc = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='', + pretty_print=True) + return rootObj + + +def parseEtree(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PedidoInformacoesLote' + rootClass = PedidoInformacoesLote + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. 
+ doc = None + mapping = {} + rootElement = rootObj.to_etree(None, name_=rootTag, mapping_=mapping) + reverse_mapping = rootObj.gds_reverse_node_mapping(mapping) + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(content) + sys.stdout.write('\n') + return rootObj, rootElement, mapping, reverse_mapping + + +def parseString(inString, silence=False): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. + ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PedidoInformacoesLote' + rootClass = PedidoInformacoesLote + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + return rootObj + + +def parseLiteral(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'PedidoInformacoesLote' + rootClass = PedidoInformacoesLote + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + doc = None + if not silence: + sys.stdout.write('#from PedidoInformacoesLote_v01 import *\n\n') + sys.stdout.write('import PedidoInformacoesLote_v01 as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + + +__all__ = [ + "CabecalhoType", + "PedidoInformacoesLote" +] diff --git a/nfselib/paulistana/v02/RetornoCancelamentoNFTS_v01.py b/nfselib/paulistana/v02/RetornoCancelamentoNFTS_v01.py new file mode 100644 index 0000000..1ef8ee3 --- /dev/null +++ b/nfselib/paulistana/v02/RetornoCancelamentoNFTS_v01.py @@ -0,0 +1,990 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Jul 22 06:40:30 2020 by generateDS.py version 2.30.15. 
+# Python 3.8.3 (default, May 17 2020, 18:15:42) [GCC 10.1.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('--no-collect-includes', '') +# ('--use-getter-setter', 'none') +# ('-f', '') +# ('-o', './paulistanalib/v02/RetornoCancelamentoNFTS_v01.py') +# +# Command line arguments: +# ./process_includes/RetornoCancelamentoNFTS_v01.xsd +# +# Command line: +# /home/mileo/Projects/oca12/bin/erpbrasil-edoc-gen-generate-python --no-namespace-defs --no-collect-includes --use-getter-setter="none" -f -o "./paulistanalib/v02/RetornoCancelamentoNFTS_v01.py" ./process_includes/RetornoCancelamentoNFTS_v01.xsd +# +# Current working directory (os.getcwd()): +# schemas +# + +import os +import sys +import re as re_ +import base64 +import datetime as datetime_ +import warnings as warnings_ +try: + from lxml import etree as etree_ +except ImportError: + from xml.etree import ElementTree as etree_ + + +Validate_simpletypes_ = True +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ImportError: + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ImportError: + GenerateDSNamespaceTypePrefixes_ = {} + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ImportError as exp: + + class GeneratedsSuper(object): + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data) + def gds_validate_base64(self, input_data, node=None, input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % input_data + def gds_validate_integer(self, input_data, node=None, input_name=''): + return input_data + def gds_format_integer_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integers') + return values + def gds_format_float(self, input_data, input_name=''): + return ('%.15f' % input_data).rstrip('0') + def gds_validate_float(self, input_data, node=None, input_name=''): + return input_data + def gds_format_float_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of floats') + return values + def gds_format_double(self, input_data, input_name=''): + return '%e' % input_data + def gds_validate_double(self, input_data, node=None, input_name=''): + return input_data + def gds_format_double_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of doubles') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_validate_boolean(self, input_data, node=None, input_name=''): + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, 
input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + if value not in ('true', '1', 'false', '0', ): + raise_parse_error( + node, + 'Requires sequence of booleans ' + '("true", "1", "false", "0")') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' 
+ time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. 
+ found1 = True + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + return None + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + if type(self) != type(other): + return False + return self.__dict__ == other.__dict__ + def __ne__(self, other): + return not self.__eq__(other) + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. 
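+#
+# Illustrative usage sketch (editorial addition, not generateDS.py output):
+# a minimal, hedged example of how the cancellation response returned by the
+# web service might be loaded back into the object tree defined later in
+# this module. The helper name is a placeholder; only parseString() and the
+# generated RetornoCancelamentoNFTS attributes are assumed, as defined below.
+def _exemplo_retorno_cancelamento_nfts(xml_retorno):
+    # parseString() builds the root object from the raw XML string returned
+    # by the service; silence=True suppresses the re-export to stdout.
+    retorno = parseString(xml_retorno, silence=True)
+    # Cabecalho and ListaRetornoCancelamento mirror the schema elements of
+    # the same name; ListaRetornoCancelamento is a (possibly empty) list.
+    return retorno.Cabecalho, retorno.ListaRetornoCancelamento
+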
+# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. +# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(' ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." + if not inStr: + return '' + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s2 = '' + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos:mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start():mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + return s1 + + +def quote_attrib(inStr): + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s1 = s1.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', """) + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find('\n') == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find('\n') == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = '' + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(':') + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get('{%s}%s' % (namespace, name, )) + return value + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + def getCategory(self): + return self.category + def getContenttype(self, content_type): + return self.content_type + def getValue(self): + return self.value + def getName(self): + return self.name + def export(self, outfile, level, name, namespace, + pretty_print=True): + if self.category == 
MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def set_data_type(self, data_type): 
self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + +# +# Data representation classes. +# + + +class tpOpcaoSimples(object): + _0='0' + _1='1' + _2='2' + _3='3' + + +class tpStatusNFe(object): + N='N' + C='C' + E='E' + + +class tpTipoRPS(object): + RPS='RPS' + RPSM='RPS-M' + RPSC='RPS-C' + + +class RetornoCancelamentoNFTS(GeneratedsSuper): + """Schema utilizado para RETORNO de Pedidos de cancelamento da + NFTS.Este Schema XML é utilizado pelo Web Service para informar + aos tomadores/intermediários de serviços o resultado do pedido + de cancelamento de uma NFTS.""" + subclass = None + superclass = None + def __init__(self, Cabecalho=None, ListaRetornoCancelamento=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.Cabecalho = Cabecalho + if ListaRetornoCancelamento is None: + self.ListaRetornoCancelamento = [] + else: + self.ListaRetornoCancelamento = ListaRetornoCancelamento + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, RetornoCancelamentoNFTS) + if subclass is not None: + return subclass(*args_, **kwargs_) + if RetornoCancelamentoNFTS.subclass: + return RetornoCancelamentoNFTS.subclass(*args_, **kwargs_) + else: + return RetornoCancelamentoNFTS(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): + if ( + self.Cabecalho is not None or + self.ListaRetornoCancelamento + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='RetornoCancelamentoNFTS', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('RetornoCancelamentoNFTS') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='RetornoCancelamentoNFTS') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='RetornoCancelamentoNFTS', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='RetornoCancelamentoNFTS'): + pass + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='RetornoCancelamentoNFTS', fromsubclass_=False, 
pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Cabecalho is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sCabecalho>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Cabecalho), input_name='Cabecalho')), namespaceprefix_ , eol_)) + for ListaRetornoCancelamento_ in self.ListaRetornoCancelamento: + showIndent(outfile, level, pretty_print) + outfile.write('<%sListaRetornoCancelamento>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(ListaRetornoCancelamento_), input_name='ListaRetornoCancelamento')), namespaceprefix_ , eol_)) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + pass + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'Cabecalho': + Cabecalho_ = child_.text + Cabecalho_ = self.gds_validate_string(Cabecalho_, node, 'Cabecalho') + self.Cabecalho = Cabecalho_ + elif nodeName_ == 'ListaRetornoCancelamento': + ListaRetornoCancelamento_ = child_.text + ListaRetornoCancelamento_ = self.gds_validate_string(ListaRetornoCancelamento_, node, 'ListaRetornoCancelamento') + self.ListaRetornoCancelamento.append(ListaRetornoCancelamento_) +# end class RetornoCancelamentoNFTS + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + rootClass = GDSClassesMapping.get(tag) + if rootClass is None: + rootClass = globals().get(tag) + return tag, rootClass + + +def parse(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'RetornoCancelamentoNFTS' + rootClass = RetornoCancelamentoNFTS + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + doc = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='', + pretty_print=True) + return rootObj + + +def parseEtree(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'RetornoCancelamentoNFTS' + rootClass = RetornoCancelamentoNFTS + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + doc = None + mapping = {} + rootElement = rootObj.to_etree(None, name_=rootTag, mapping_=mapping) + reverse_mapping = rootObj.gds_reverse_node_mapping(mapping) + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(content) + sys.stdout.write('\n') + return rootObj, rootElement, mapping, reverse_mapping + + +def parseString(inString, silence=False): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. 
+ ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'RetornoCancelamentoNFTS' + rootClass = RetornoCancelamentoNFTS + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + return rootObj + + +def parseLiteral(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'RetornoCancelamentoNFTS' + rootClass = RetornoCancelamentoNFTS + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + doc = None + if not silence: + sys.stdout.write('#from RetornoCancelamentoNFTS_v01 import *\n\n') + sys.stdout.write('import RetornoCancelamentoNFTS_v01 as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + + +__all__ = [ + "RetornoCancelamentoNFTS" +] diff --git a/nfselib/paulistana/v02/RetornoCancelamentoNFe_v01.py b/nfselib/paulistana/v02/RetornoCancelamentoNFe_v01.py new file mode 100644 index 0000000..4875967 --- /dev/null +++ b/nfselib/paulistana/v02/RetornoCancelamentoNFe_v01.py @@ -0,0 +1,1064 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Jul 22 06:40:30 2020 by generateDS.py version 2.30.15. +# Python 3.8.3 (default, May 17 2020, 18:15:42) [GCC 10.1.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('--no-collect-includes', '') +# ('--use-getter-setter', 'none') +# ('-f', '') +# ('-o', './paulistanalib/v02/RetornoCancelamentoNFe_v01.py') +# +# Command line arguments: +# ./process_includes/RetornoCancelamentoNFe_v01.xsd +# +# Command line: +# /home/mileo/Projects/oca12/bin/erpbrasil-edoc-gen-generate-python --no-namespace-defs --no-collect-includes --use-getter-setter="none" -f -o "./paulistanalib/v02/RetornoCancelamentoNFe_v01.py" ./process_includes/RetornoCancelamentoNFe_v01.xsd +# +# Current working directory (os.getcwd()): +# schemas +# + +import os +import sys +import re as re_ +import base64 +import datetime as datetime_ +import warnings as warnings_ +try: + from lxml import etree as etree_ +except ImportError: + from xml.etree import ElementTree as etree_ + + +Validate_simpletypes_ = True +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. 
+ try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. +# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ImportError: + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ImportError: + GenerateDSNamespaceTypePrefixes_ = {} + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. 
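+#
+# A minimal illustrative sketch (an assumption, not part of the generated
+# code): such a generatedssuper.py only needs to expose a class named
+# GeneratedsSuper providing the gds_* helpers the element classes call,
+# for example:
+#
+#     # File: generatedssuper.py
+#     class GeneratedsSuper(object):
+#         def gds_format_string(self, input_data, input_name=''):
+#             return input_data
+#
+# When that module is importable, the fallback class defined in the
+# try/except below is skipped.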
+ +try: + from generatedssuper import GeneratedsSuper +except ImportError as exp: + + class GeneratedsSuper(object): + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data) + def gds_validate_base64(self, input_data, node=None, input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % input_data + def gds_validate_integer(self, input_data, node=None, input_name=''): + return input_data + def gds_format_integer_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integers') + return values + def gds_format_float(self, input_data, input_name=''): + return ('%.15f' % input_data).rstrip('0') + def gds_validate_float(self, input_data, node=None, input_name=''): + return input_data + def gds_format_float_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of floats') + return values + def gds_format_double(self, input_data, input_name=''): + return '%e' % input_data + def gds_validate_double(self, input_data, node=None, input_name=''): + return input_data + def gds_format_double_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of doubles') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_validate_boolean(self, input_data, node=None, input_name=''): + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + if value not in ('true', '1', 'false', '0', ): + raise_parse_error( + node, + 'Requires sequence of booleans ' + '("true", "1", "false", "0")') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + 
input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + 
total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + return None + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + if type(self) != type(other): + return False + return self.__dict__ == other.__dict__ + def __ne__(self, other): + return not self.__eq__(other) + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. 
+# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell('<some message> -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"<!\[CDATA\[.*?\]\]>", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. +# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(' ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." + if not inStr: + return '' + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s2 = '' + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos:mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start():mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace('&', '&amp;') + s1 = s1.replace('<', '&lt;') + s1 = s1.replace('>', '&gt;') + return s1 + + +def quote_attrib(inStr): + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s1 = s1.replace('&', '&amp;') + s1 = s1.replace('<', '&lt;') + s1 = s1.replace('>', '&gt;') + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', "&quot;") + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find('\n') == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find('\n') == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = '' + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(':') + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get('{%s}%s' % (namespace, name, )) + return value + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + def getCategory(self): + return self.category + def getContenttype(self, content_type): + return self.content_type + def getValue(self): + return self.value + def getName(self): + return self.name + def export(self, outfile, level, name, namespace, + pretty_print=True): + if self.category ==
MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def set_data_type(self, data_type): 
self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + +# +# Data representation classes. +# + + +class RetornoCancelamentoNFe(GeneratedsSuper): + """Schema utilizado para RETORNO de Pedidos de cancelamento de + NFS-e.Este Schema XML é utilizado pelo Web Service para informar + aos prestadores de serviços qual o resultado do pedido de + cancelamento de NFS-e.""" + subclass = None + superclass = None + def __init__(self, Cabecalho=None, Alerta=None, Erro=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.Cabecalho = Cabecalho + if Alerta is None: + self.Alerta = [] + else: + self.Alerta = Alerta + if Erro is None: + self.Erro = [] + else: + self.Erro = Erro + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, RetornoCancelamentoNFe) + if subclass is not None: + return subclass(*args_, **kwargs_) + if RetornoCancelamentoNFe.subclass: + return RetornoCancelamentoNFe.subclass(*args_, **kwargs_) + else: + return RetornoCancelamentoNFe(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): + if ( + self.Cabecalho is not None or + self.Alerta or + self.Erro + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='RetornoCancelamentoNFe', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('RetornoCancelamentoNFe') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='RetornoCancelamentoNFe') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='RetornoCancelamentoNFe', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='RetornoCancelamentoNFe'): + pass + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='RetornoCancelamentoNFe', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Cabecalho is not None: + self.Cabecalho.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Cabecalho', pretty_print=pretty_print) + for Alerta_ in 
self.Alerta: + showIndent(outfile, level, pretty_print) + outfile.write('<%sAlerta>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(Alerta_), input_name='Alerta')), namespaceprefix_ , eol_)) + for Erro_ in self.Erro: + showIndent(outfile, level, pretty_print) + outfile.write('<%sErro>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(Erro_), input_name='Erro')), namespaceprefix_ , eol_)) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + pass + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'Cabecalho': + obj_ = CabecalhoType.factory(parent_object_=self) + obj_.build(child_) + self.Cabecalho = obj_ + obj_.original_tagname_ = 'Cabecalho' + elif nodeName_ == 'Alerta': + Alerta_ = child_.text + Alerta_ = self.gds_validate_string(Alerta_, node, 'Alerta') + self.Alerta.append(Alerta_) + elif nodeName_ == 'Erro': + Erro_ = child_.text + Erro_ = self.gds_validate_string(Erro_, node, 'Erro') + self.Erro.append(Erro_) +# end class RetornoCancelamentoNFe + + +class CabecalhoType(GeneratedsSuper): + """Cabeçalho do retorno.Versão do Schema XML utilizado.""" + subclass = None + superclass = None + def __init__(self, Versao='1', Sucesso=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.Versao = _cast(None, Versao) + self.Sucesso = Sucesso + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, CabecalhoType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if CabecalhoType.subclass: + return CabecalhoType.subclass(*args_, **kwargs_) + else: + return CabecalhoType(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): + if ( + self.Sucesso is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CabecalhoType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('CabecalhoType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CabecalhoType') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='CabecalhoType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='CabecalhoType'): + if self.Versao != "1" and 'Versao' not in already_processed: + already_processed.add('Versao') + outfile.write(' Versao=%s' % (quote_attrib(self.Versao), )) + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CabecalhoType', fromsubclass_=False, pretty_print=True): + if 
pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Sucesso is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sSucesso>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Sucesso), input_name='Sucesso')), namespaceprefix_ , eol_)) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('Versao', node) + if value is not None and 'Versao' not in already_processed: + already_processed.add('Versao') + self.Versao = value + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'Sucesso': + Sucesso_ = child_.text + Sucesso_ = self.gds_validate_string(Sucesso_, node, 'Sucesso') + self.Sucesso = Sucesso_ +# end class CabecalhoType + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + rootClass = GDSClassesMapping.get(tag) + if rootClass is None: + rootClass = globals().get(tag) + return tag, rootClass + + +def parse(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'RetornoCancelamentoNFe' + rootClass = RetornoCancelamentoNFe + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + doc = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='', + pretty_print=True) + return rootObj + + +def parseEtree(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'RetornoCancelamentoNFe' + rootClass = RetornoCancelamentoNFe + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + doc = None + mapping = {} + rootElement = rootObj.to_etree(None, name_=rootTag, mapping_=mapping) + reverse_mapping = rootObj.gds_reverse_node_mapping(mapping) + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(content) + sys.stdout.write('\n') + return rootObj, rootElement, mapping, reverse_mapping + + +def parseString(inString, silence=False): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. + ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'RetornoCancelamentoNFe' + rootClass = RetornoCancelamentoNFe + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. 
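+    # Illustrative usage only (assumed caller code, not generated output):
+    #     retorno = parseString(xml_text, silence=True)
+    #     if retorno.Cabecalho is not None:
+    #         print(retorno.Cabecalho.Sucesso)
+    # where xml_text holds a RetornoCancelamentoNFe XML document.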
+ if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + return rootObj + + +def parseLiteral(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'RetornoCancelamentoNFe' + rootClass = RetornoCancelamentoNFe + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + doc = None + if not silence: + sys.stdout.write('#from RetornoCancelamentoNFe_v01 import *\n\n') + sys.stdout.write('import RetornoCancelamentoNFe_v01 as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + + +__all__ = [ + "CabecalhoType", + "RetornoCancelamentoNFe" +] diff --git a/nfselib/paulistana/v02/RetornoConsultaCNPJ_v01.py b/nfselib/paulistana/v02/RetornoConsultaCNPJ_v01.py new file mode 100644 index 0000000..7996f12 --- /dev/null +++ b/nfselib/paulistana/v02/RetornoConsultaCNPJ_v01.py @@ -0,0 +1,1166 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Jul 22 06:40:30 2020 by generateDS.py version 2.30.15. +# Python 3.8.3 (default, May 17 2020, 18:15:42) [GCC 10.1.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('--no-collect-includes', '') +# ('--use-getter-setter', 'none') +# ('-f', '') +# ('-o', './paulistanalib/v02/RetornoConsultaCNPJ_v01.py') +# +# Command line arguments: +# ./process_includes/RetornoConsultaCNPJ_v01.xsd +# +# Command line: +# /home/mileo/Projects/oca12/bin/erpbrasil-edoc-gen-generate-python --no-namespace-defs --no-collect-includes --use-getter-setter="none" -f -o "./paulistanalib/v02/RetornoConsultaCNPJ_v01.py" ./process_includes/RetornoConsultaCNPJ_v01.xsd +# +# Current working directory (os.getcwd()): +# schemas +# + +import os +import sys +import re as re_ +import base64 +import datetime as datetime_ +import warnings as warnings_ +try: + from lxml import etree as etree_ +except ImportError: + from xml.etree import ElementTree as etree_ + + +Validate_simpletypes_ = True +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. 
This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. +# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ImportError: + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ImportError: + GenerateDSNamespaceTypePrefixes_ = {} + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ImportError as exp: + + class GeneratedsSuper(object): + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data) + def gds_validate_base64(self, input_data, node=None, input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % input_data + def gds_validate_integer(self, input_data, node=None, input_name=''): + return input_data + def gds_format_integer_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integers') + return values + def gds_format_float(self, input_data, input_name=''): + return ('%.15f' % input_data).rstrip('0') + def gds_validate_float(self, input_data, node=None, input_name=''): + return input_data + def gds_format_float_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except 
(TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of floats') + return values + def gds_format_double(self, input_data, input_name=''): + return '%e' % input_data + def gds_validate_double(self, input_data, node=None, input_name=''): + return input_data + def gds_format_double_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of doubles') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_validate_boolean(self, input_data, node=None, input_name=''): + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + if value not in ('true', '1', 'false', '0', ): + raise_parse_error( + node, + 'Requires sequence of booleans ' + '("true", "1", "false", "0")') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' 
+ time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. 
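+            # Illustrative example (an assumption, not generated output):
+            #     patterns = [['[0-9]{11}', '[0-9]{14}']]
+            # accepts a target made up of exactly 11 or exactly 14 digits:
+            # at least one pattern in each inner list must match the whole
+            # target, and every inner list must contain such a match.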
+ found1 = True + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + return None + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + if type(self) != type(other): + return False + return self.__dict__ == other.__dict__ + def __ne__(self, other): + return not self.__eq__(other) + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. 
+# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell('<some message> -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"<!\[CDATA\[.*?\]\]>", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. +# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(' ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." + if not inStr: + return '' + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s2 = '' + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos:mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start():mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace('&', '&amp;') + s1 = s1.replace('<', '&lt;') + s1 = s1.replace('>', '&gt;') + return s1 + + +def quote_attrib(inStr): + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s1 = s1.replace('&', '&amp;') + s1 = s1.replace('<', '&lt;') + s1 = s1.replace('>', '&gt;') + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', "&quot;") + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find('\n') == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find('\n') == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = '' + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(':') + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get('{%s}%s' % (namespace, name, )) + return value + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + def getCategory(self): + return self.category + def getContenttype(self, content_type): + return self.content_type + def getValue(self): + return self.value + def getName(self): + return self.name + def export(self, outfile, level, name, namespace, + pretty_print=True): + if self.category ==
MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def set_data_type(self, data_type): 
self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + +# +# Data representation classes. +# + + +class RetornoConsultaCNPJ(GeneratedsSuper): + """Schema utilizado para RETORNO de Pedidos de Consultas de CNPJ.Este + Schema XML é utilizado pelo Web Service para informar aos + tomadores e/ou prestadores de serviços quais Inscrições + Municipais (CCM) estão vinculadas a um determinado CNPJ e se + estes CCM emitem NFS-e ou não.""" + subclass = None + superclass = None + def __init__(self, Cabecalho=None, Alerta=None, Erro=None, Detalhe=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.Cabecalho = Cabecalho + if Alerta is None: + self.Alerta = [] + else: + self.Alerta = Alerta + if Erro is None: + self.Erro = [] + else: + self.Erro = Erro + if Detalhe is None: + self.Detalhe = [] + else: + self.Detalhe = Detalhe + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, RetornoConsultaCNPJ) + if subclass is not None: + return subclass(*args_, **kwargs_) + if RetornoConsultaCNPJ.subclass: + return RetornoConsultaCNPJ.subclass(*args_, **kwargs_) + else: + return RetornoConsultaCNPJ(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): + if ( + self.Cabecalho is not None or + self.Alerta or + self.Erro or + self.Detalhe + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='RetornoConsultaCNPJ', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('RetornoConsultaCNPJ') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='RetornoConsultaCNPJ') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='RetornoConsultaCNPJ', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='RetornoConsultaCNPJ'): + pass + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='RetornoConsultaCNPJ', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Cabecalho is not 
None: + self.Cabecalho.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Cabecalho', pretty_print=pretty_print) + for Alerta_ in self.Alerta: + showIndent(outfile, level, pretty_print) + outfile.write('<%sAlerta>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(Alerta_), input_name='Alerta')), namespaceprefix_ , eol_)) + for Erro_ in self.Erro: + showIndent(outfile, level, pretty_print) + outfile.write('<%sErro>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(Erro_), input_name='Erro')), namespaceprefix_ , eol_)) + for Detalhe_ in self.Detalhe: + Detalhe_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Detalhe', pretty_print=pretty_print) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + pass + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'Cabecalho': + obj_ = CabecalhoType.factory(parent_object_=self) + obj_.build(child_) + self.Cabecalho = obj_ + obj_.original_tagname_ = 'Cabecalho' + elif nodeName_ == 'Alerta': + Alerta_ = child_.text + Alerta_ = self.gds_validate_string(Alerta_, node, 'Alerta') + self.Alerta.append(Alerta_) + elif nodeName_ == 'Erro': + Erro_ = child_.text + Erro_ = self.gds_validate_string(Erro_, node, 'Erro') + self.Erro.append(Erro_) + elif nodeName_ == 'Detalhe': + obj_ = DetalheType.factory(parent_object_=self) + obj_.build(child_) + self.Detalhe.append(obj_) + obj_.original_tagname_ = 'Detalhe' +# end class RetornoConsultaCNPJ + + +class CabecalhoType(GeneratedsSuper): + """Cabeçalho do retorno.Versão do Schema XML utilizado.""" + subclass = None + superclass = None + def __init__(self, Versao='1', Sucesso=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.Versao = _cast(None, Versao) + self.Sucesso = Sucesso + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, CabecalhoType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if CabecalhoType.subclass: + return CabecalhoType.subclass(*args_, **kwargs_) + else: + return CabecalhoType(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): + if ( + self.Sucesso is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CabecalhoType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('CabecalhoType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CabecalhoType') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='CabecalhoType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + 
outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='CabecalhoType'): + if self.Versao != "1" and 'Versao' not in already_processed: + already_processed.add('Versao') + outfile.write(' Versao=%s' % (quote_attrib(self.Versao), )) + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CabecalhoType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Sucesso is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sSucesso>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Sucesso), input_name='Sucesso')), namespaceprefix_ , eol_)) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('Versao', node) + if value is not None and 'Versao' not in already_processed: + already_processed.add('Versao') + self.Versao = value + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'Sucesso': + Sucesso_ = child_.text + Sucesso_ = self.gds_validate_string(Sucesso_, node, 'Sucesso') + self.Sucesso = Sucesso_ +# end class CabecalhoType + + +class DetalheType(GeneratedsSuper): + subclass = None + superclass = None + def __init__(self, InscricaoMunicipal=None, EmiteNFe=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.InscricaoMunicipal = InscricaoMunicipal + self.EmiteNFe = EmiteNFe + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, DetalheType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if DetalheType.subclass: + return DetalheType.subclass(*args_, **kwargs_) + else: + return DetalheType(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): + if ( + self.InscricaoMunicipal is not None or + self.EmiteNFe is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='DetalheType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('DetalheType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='DetalheType') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='DetalheType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='DetalheType'): + pass + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='DetalheType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + 
else: + eol_ = '' + if self.InscricaoMunicipal is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sInscricaoMunicipal>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.InscricaoMunicipal), input_name='InscricaoMunicipal')), namespaceprefix_ , eol_)) + if self.EmiteNFe is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sEmiteNFe>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.EmiteNFe, input_name='EmiteNFe'), namespaceprefix_ , eol_)) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + pass + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'InscricaoMunicipal': + InscricaoMunicipal_ = child_.text + InscricaoMunicipal_ = self.gds_validate_string(InscricaoMunicipal_, node, 'InscricaoMunicipal') + self.InscricaoMunicipal = InscricaoMunicipal_ + elif nodeName_ == 'EmiteNFe': + sval_ = child_.text + if sval_ in ('true', '1'): + ival_ = True + elif sval_ in ('false', '0'): + ival_ = False + else: + raise_parse_error(child_, 'requires boolean') + ival_ = self.gds_validate_boolean(ival_, node, 'EmiteNFe') + self.EmiteNFe = ival_ +# end class DetalheType + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + rootClass = GDSClassesMapping.get(tag) + if rootClass is None: + rootClass = globals().get(tag) + return tag, rootClass + + +def parse(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'RetornoConsultaCNPJ' + rootClass = RetornoConsultaCNPJ + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + doc = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='', + pretty_print=True) + return rootObj + + +def parseEtree(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'RetornoConsultaCNPJ' + rootClass = RetornoConsultaCNPJ + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + doc = None + mapping = {} + rootElement = rootObj.to_etree(None, name_=rootTag, mapping_=mapping) + reverse_mapping = rootObj.gds_reverse_node_mapping(mapping) + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(content) + sys.stdout.write('\n') + return rootObj, rootElement, mapping, reverse_mapping + + +def parseString(inString, silence=False): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. 
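    Illustrative call (not part of the generated module; the XML below is a
    hypothetical, minimal RetornoConsultaCNPJ document):

        xml = b'<RetornoConsultaCNPJ><Cabecalho Versao="1"><Sucesso>true</Sucesso></Cabecalho></RetornoConsultaCNPJ>'
        obj = parseString(xml, silence=True)
        # obj.Cabecalho.Sucesso == 'true'; obj.Detalhe == []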
+ ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'RetornoConsultaCNPJ' + rootClass = RetornoConsultaCNPJ + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + return rootObj + + +def parseLiteral(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'RetornoConsultaCNPJ' + rootClass = RetornoConsultaCNPJ + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + doc = None + if not silence: + sys.stdout.write('#from RetornoConsultaCNPJ_v01 import *\n\n') + sys.stdout.write('import RetornoConsultaCNPJ_v01 as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + + +__all__ = [ + "CabecalhoType", + "DetalheType", + "RetornoConsultaCNPJ" +] diff --git a/nfselib/paulistana/v02/RetornoConsultaCPOM_v01.py b/nfselib/paulistana/v02/RetornoConsultaCPOM_v01.py new file mode 100644 index 0000000..88343d3 --- /dev/null +++ b/nfselib/paulistana/v02/RetornoConsultaCPOM_v01.py @@ -0,0 +1,987 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Jul 22 06:40:30 2020 by generateDS.py version 2.30.15. +# Python 3.8.3 (default, May 17 2020, 18:15:42) [GCC 10.1.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('--no-collect-includes', '') +# ('--use-getter-setter', 'none') +# ('-f', '') +# ('-o', './paulistanalib/v02/RetornoConsultaCPOM_v01.py') +# +# Command line arguments: +# ./process_includes/RetornoConsultaCPOM_v01.xsd +# +# Command line: +# /home/mileo/Projects/oca12/bin/erpbrasil-edoc-gen-generate-python --no-namespace-defs --no-collect-includes --use-getter-setter="none" -f -o "./paulistanalib/v02/RetornoConsultaCPOM_v01.py" ./process_includes/RetornoConsultaCPOM_v01.xsd +# +# Current working directory (os.getcwd()): +# schemas +# + +import os +import sys +import re as re_ +import base64 +import datetime as datetime_ +import warnings as warnings_ +try: + from lxml import etree as etree_ +except ImportError: + from xml.etree import ElementTree as etree_ + + +Validate_simpletypes_ = True +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. 
+ try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. +# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ImportError: + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ImportError: + GenerateDSNamespaceTypePrefixes_ = {} + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. 
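#
# A minimal, illustrative sketch of such an override module (this file is
# not part of this patch; a replacement class must provide every gds_*
# helper the generated classes call, so in practice it starts from a copy
# of the fallback class defined below):
#
# # File: generatedssuper.py
#
# class GeneratedsSuper(object):
#     ...  # copy of the fallback implementation
#     def gds_format_string(self, input_data, input_name=''):
#         # example customization: strip stray whitespace on export
#         return input_data.strip()
#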
+ +try: + from generatedssuper import GeneratedsSuper +except ImportError as exp: + + class GeneratedsSuper(object): + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data) + def gds_validate_base64(self, input_data, node=None, input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % input_data + def gds_validate_integer(self, input_data, node=None, input_name=''): + return input_data + def gds_format_integer_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integers') + return values + def gds_format_float(self, input_data, input_name=''): + return ('%.15f' % input_data).rstrip('0') + def gds_validate_float(self, input_data, node=None, input_name=''): + return input_data + def gds_format_float_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of floats') + return values + def gds_format_double(self, input_data, input_name=''): + return '%e' % input_data + def gds_validate_double(self, input_data, node=None, input_name=''): + return input_data + def gds_format_double_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of doubles') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_validate_boolean(self, input_data, node=None, input_name=''): + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + if value not in ('true', '1', 'false', '0', ): + raise_parse_error( + node, + 'Requires sequence of booleans ' + '("true", "1", "false", "0")') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + 
input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + 
total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + return None + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + if type(self) != type(other): + return False + return self.__dict__ == other.__dict__ + def __ne__(self, other): + return not self.__eq__(other) + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. 
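#
# Illustrative note (not part of the generated file): getSubclassFromModule_
# above looks for a class named after the generated class plus the suffix
# "Sub".  A hypothetical override module could therefore contain:
#
# # File: retornoconsultacpom_sub.py  (name is only an example)
# import sys
# import RetornoConsultaCPOM_v01 as supermod
#
# class RetornoConsultaCPOMSub(supermod.RetornoConsultaCPOM):
#     pass
#
# supermod.CurrentSubclassModule_ = sys.modules[__name__]
#
# With CurrentSubclassModule_ set, factory() returns RetornoConsultaCPOMSub
# instances instead of the generated superclass.
#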
+# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. +# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(' ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." + if not inStr: + return '' + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s2 = '' + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos:mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start():mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + return s1 + + +def quote_attrib(inStr): + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s1 = s1.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', """) + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find('\n') == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find('\n') == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = '' + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(':') + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get('{%s}%s' % (namespace, name, )) + return value + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + def getCategory(self): + return self.category + def getContenttype(self, content_type): + return self.content_type + def getValue(self): + return self.value + def getName(self): + return self.name + def export(self, outfile, level, name, namespace, + pretty_print=True): + if self.category == 
MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def set_data_type(self, data_type): 
self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + +# +# Data representation classes. +# + + +class tpOpcaoSimples(object): + _0='0' + _1='1' + _2='2' + _3='3' + + +class tpStatusNFe(object): + N='N' + C='C' + E='E' + + +class tpTipoRPS(object): + RPS='RPS' + RPSM='RPS-M' + RPSC='RPS-C' + + +class RetornoConsultaCPOM(GeneratedsSuper): + """Schema utilizado para RETORNO de consultas ao CPOM.Este Schema XML é + utilizado pelo Web Service para informar aos + tomadores/intermediários de serviçoso resultado do pedido de + consulta de inscrição no CPOM.""" + subclass = None + superclass = None + def __init__(self, Cabecalho=None, RetornoCPOM=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.Cabecalho = Cabecalho + self.RetornoCPOM = RetornoCPOM + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, RetornoConsultaCPOM) + if subclass is not None: + return subclass(*args_, **kwargs_) + if RetornoConsultaCPOM.subclass: + return RetornoConsultaCPOM.subclass(*args_, **kwargs_) + else: + return RetornoConsultaCPOM(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): + if ( + self.Cabecalho is not None or + self.RetornoCPOM is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='RetornoConsultaCPOM', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('RetornoConsultaCPOM') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='RetornoConsultaCPOM') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='RetornoConsultaCPOM', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='RetornoConsultaCPOM'): + pass + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='RetornoConsultaCPOM', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Cabecalho is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sCabecalho>%s%s' 
% (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Cabecalho), input_name='Cabecalho')), namespaceprefix_ , eol_)) + if self.RetornoCPOM is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sRetornoCPOM>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.RetornoCPOM), input_name='RetornoCPOM')), namespaceprefix_ , eol_)) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + pass + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'Cabecalho': + Cabecalho_ = child_.text + Cabecalho_ = self.gds_validate_string(Cabecalho_, node, 'Cabecalho') + self.Cabecalho = Cabecalho_ + elif nodeName_ == 'RetornoCPOM': + RetornoCPOM_ = child_.text + RetornoCPOM_ = self.gds_validate_string(RetornoCPOM_, node, 'RetornoCPOM') + self.RetornoCPOM = RetornoCPOM_ +# end class RetornoConsultaCPOM + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + rootClass = GDSClassesMapping.get(tag) + if rootClass is None: + rootClass = globals().get(tag) + return tag, rootClass + + +def parse(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'RetornoConsultaCPOM' + rootClass = RetornoConsultaCPOM + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + doc = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='', + pretty_print=True) + return rootObj + + +def parseEtree(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'RetornoConsultaCPOM' + rootClass = RetornoConsultaCPOM + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + doc = None + mapping = {} + rootElement = rootObj.to_etree(None, name_=rootTag, mapping_=mapping) + reverse_mapping = rootObj.gds_reverse_node_mapping(mapping) + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(content) + sys.stdout.write('\n') + return rootObj, rootElement, mapping, reverse_mapping + + +def parseString(inString, silence=False): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. + ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'RetornoConsultaCPOM' + rootClass = RetornoConsultaCPOM + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. 
+ if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + return rootObj + + +def parseLiteral(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'RetornoConsultaCPOM' + rootClass = RetornoConsultaCPOM + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + doc = None + if not silence: + sys.stdout.write('#from RetornoConsultaCPOM_v01 import *\n\n') + sys.stdout.write('import RetornoConsultaCPOM_v01 as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + + +__all__ = [ + "RetornoConsultaCPOM" +] diff --git a/nfselib/paulistana/v02/RetornoConsultaEmissaoNFSE_v01.py b/nfselib/paulistana/v02/RetornoConsultaEmissaoNFSE_v01.py new file mode 100644 index 0000000..0cc8a41 --- /dev/null +++ b/nfselib/paulistana/v02/RetornoConsultaEmissaoNFSE_v01.py @@ -0,0 +1,987 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Jul 22 06:40:30 2020 by generateDS.py version 2.30.15. +# Python 3.8.3 (default, May 17 2020, 18:15:42) [GCC 10.1.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('--no-collect-includes', '') +# ('--use-getter-setter', 'none') +# ('-f', '') +# ('-o', './paulistanalib/v02/RetornoConsultaEmissaoNFSE_v01.py') +# +# Command line arguments: +# ./process_includes/RetornoConsultaEmissaoNFSE_v01.xsd +# +# Command line: +# /home/mileo/Projects/oca12/bin/erpbrasil-edoc-gen-generate-python --no-namespace-defs --no-collect-includes --use-getter-setter="none" -f -o "./paulistanalib/v02/RetornoConsultaEmissaoNFSE_v01.py" ./process_includes/RetornoConsultaEmissaoNFSE_v01.xsd +# +# Current working directory (os.getcwd()): +# schemas +# + +import os +import sys +import re as re_ +import base64 +import datetime as datetime_ +import warnings as warnings_ +try: + from lxml import etree as etree_ +except ImportError: + from xml.etree import ElementTree as etree_ + + +Validate_simpletypes_ = True +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. 
This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. +# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ImportError: + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ImportError: + GenerateDSNamespaceTypePrefixes_ = {} + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ImportError as exp: + + class GeneratedsSuper(object): + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data) + def gds_validate_base64(self, input_data, node=None, input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % input_data + def gds_validate_integer(self, input_data, node=None, input_name=''): + return input_data + def gds_format_integer_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integers') + return values + def gds_format_float(self, input_data, input_name=''): + return ('%.15f' % input_data).rstrip('0') + def gds_validate_float(self, input_data, node=None, input_name=''): + return input_data + def gds_format_float_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except 
(TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of floats') + return values + def gds_format_double(self, input_data, input_name=''): + return '%e' % input_data + def gds_validate_double(self, input_data, node=None, input_name=''): + return input_data + def gds_format_double_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of doubles') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_validate_boolean(self, input_data, node=None, input_name=''): + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + if value not in ('true', '1', 'false', '0', ): + raise_parse_error( + node, + 'Requires sequence of booleans ' + '("true", "1", "false", "0")') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' 
+ time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. 
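            # Illustrative example (hypothetical values): with
            #     patterns = [['[0-9]{11}', '[0-9]{14}']]
            # a 14-digit target such as '12345678000195' fully matches the
            # second alternative of the inner list, so this method returns
            # True; a target matching no alternative of some inner list
            # yields False.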
+ found1 = True + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + return None + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + if type(self) != type(other): + return False + return self.__dict__ == other.__dict__ + def __ne__(self, other): + return not self.__eq__(other) + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. 
+# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. +# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(' ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." + if not inStr: + return '' + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s2 = '' + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos:mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start():mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + return s1 + + +def quote_attrib(inStr): + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s1 = s1.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', """) + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find('\n') == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find('\n') == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = '' + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(':') + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get('{%s}%s' % (namespace, name, )) + return value + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + def getCategory(self): + return self.category + def getContenttype(self, content_type): + return self.content_type + def getValue(self): + return self.value + def getName(self): + return self.name + def export(self, outfile, level, name, namespace, + pretty_print=True): + if self.category == 
MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def set_data_type(self, data_type): 
self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + +# +# Data representation classes. +# + + +class tpOpcaoSimples(object): + _0='0' + _1='1' + _2='2' + _3='3' + + +class tpStatusNFe(object): + N='N' + C='C' + E='E' + + +class tpTipoRPS(object): + RPS='RPS' + RPSM='RPS-M' + RPSC='RPS-C' + + +class RetornoConsultaEmissaoNFSE(GeneratedsSuper): + """Schema utilizado para RETORNO de consultas a autorização de emissão + da NFSE.Este Schema XML é utilizado pelo Web Service para + informar aos tomadores/intermediários de serviçoso resultado do + pedido de consulta de autorização a emissão da NFSE.""" + subclass = None + superclass = None + def __init__(self, Cabecalho=None, RetornoEmissaoNFSE=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.Cabecalho = Cabecalho + self.RetornoEmissaoNFSE = RetornoEmissaoNFSE + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, RetornoConsultaEmissaoNFSE) + if subclass is not None: + return subclass(*args_, **kwargs_) + if RetornoConsultaEmissaoNFSE.subclass: + return RetornoConsultaEmissaoNFSE.subclass(*args_, **kwargs_) + else: + return RetornoConsultaEmissaoNFSE(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): + if ( + self.Cabecalho is not None or + self.RetornoEmissaoNFSE is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='RetornoConsultaEmissaoNFSE', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('RetornoConsultaEmissaoNFSE') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='RetornoConsultaEmissaoNFSE') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='RetornoConsultaEmissaoNFSE', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='RetornoConsultaEmissaoNFSE'): + pass + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='RetornoConsultaEmissaoNFSE', fromsubclass_=False, pretty_print=True): + if pretty_print: + 
eol_ = '\n' + else: + eol_ = '' + if self.Cabecalho is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sCabecalho>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Cabecalho), input_name='Cabecalho')), namespaceprefix_ , eol_)) + if self.RetornoEmissaoNFSE is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sRetornoEmissaoNFSE>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.RetornoEmissaoNFSE), input_name='RetornoEmissaoNFSE')), namespaceprefix_ , eol_)) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + pass + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'Cabecalho': + Cabecalho_ = child_.text + Cabecalho_ = self.gds_validate_string(Cabecalho_, node, 'Cabecalho') + self.Cabecalho = Cabecalho_ + elif nodeName_ == 'RetornoEmissaoNFSE': + RetornoEmissaoNFSE_ = child_.text + RetornoEmissaoNFSE_ = self.gds_validate_string(RetornoEmissaoNFSE_, node, 'RetornoEmissaoNFSE') + self.RetornoEmissaoNFSE = RetornoEmissaoNFSE_ +# end class RetornoConsultaEmissaoNFSE + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + rootClass = GDSClassesMapping.get(tag) + if rootClass is None: + rootClass = globals().get(tag) + return tag, rootClass + + +def parse(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'RetornoConsultaEmissaoNFSE' + rootClass = RetornoConsultaEmissaoNFSE + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + doc = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='', + pretty_print=True) + return rootObj + + +def parseEtree(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'RetornoConsultaEmissaoNFSE' + rootClass = RetornoConsultaEmissaoNFSE + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + doc = None + mapping = {} + rootElement = rootObj.to_etree(None, name_=rootTag, mapping_=mapping) + reverse_mapping = rootObj.gds_reverse_node_mapping(mapping) + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(content) + sys.stdout.write('\n') + return rootObj, rootElement, mapping, reverse_mapping + + +def parseString(inString, silence=False): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. 
+ ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'RetornoConsultaEmissaoNFSE' + rootClass = RetornoConsultaEmissaoNFSE + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + return rootObj + + +def parseLiteral(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'RetornoConsultaEmissaoNFSE' + rootClass = RetornoConsultaEmissaoNFSE + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + doc = None + if not silence: + sys.stdout.write('#from RetornoConsultaEmissaoNFSE_v01 import *\n\n') + sys.stdout.write('import RetornoConsultaEmissaoNFSE_v01 as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + + +__all__ = [ + "RetornoConsultaEmissaoNFSE" +] diff --git a/nfselib/paulistana/v02/RetornoConsultaInformacoesLoteNFTS_v01.py b/nfselib/paulistana/v02/RetornoConsultaInformacoesLoteNFTS_v01.py new file mode 100644 index 0000000..fc5b252 --- /dev/null +++ b/nfselib/paulistana/v02/RetornoConsultaInformacoesLoteNFTS_v01.py @@ -0,0 +1,990 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Jul 22 06:40:30 2020 by generateDS.py version 2.30.15. +# Python 3.8.3 (default, May 17 2020, 18:15:42) [GCC 10.1.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('--no-collect-includes', '') +# ('--use-getter-setter', 'none') +# ('-f', '') +# ('-o', './paulistanalib/v02/RetornoConsultaInformacoesLoteNFTS_v01.py') +# +# Command line arguments: +# ./process_includes/RetornoConsultaInformacoesLoteNFTS_v01.xsd +# +# Command line: +# /home/mileo/Projects/oca12/bin/erpbrasil-edoc-gen-generate-python --no-namespace-defs --no-collect-includes --use-getter-setter="none" -f -o "./paulistanalib/v02/RetornoConsultaInformacoesLoteNFTS_v01.py" ./process_includes/RetornoConsultaInformacoesLoteNFTS_v01.xsd +# +# Current working directory (os.getcwd()): +# schemas +# + +import os +import sys +import re as re_ +import base64 +import datetime as datetime_ +import warnings as warnings_ +try: + from lxml import etree as etree_ +except ImportError: + from xml.etree import ElementTree as etree_ + + +Validate_simpletypes_ = True +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. 
+ try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. +# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ImportError: + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ImportError: + GenerateDSNamespaceTypePrefixes_ = {} + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. 
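+#
+# A minimal sketch of such an override module (hypothetical contents for the
+# generatedssuper.py file mentioned above, placed on the import path). Note
+# that the replacement class must provide every gds_* helper that the
+# generated classes call, so in practice you would start from a copy of the
+# fallback class defined just below and adjust only what you need, e.g.:
+#
+## # File: generatedssuper.py
+## class GeneratedsSuper(object):
+##     # ... full set of gds_* helpers copied from the fallback below ...
+##     def gds_format_string(self, input_data, input_name=''):
+##         # e.g. collapse internal whitespace before the value is exported
+##         return ' '.join(input_data.split())
+#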
+ +try: + from generatedssuper import GeneratedsSuper +except ImportError as exp: + + class GeneratedsSuper(object): + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data) + def gds_validate_base64(self, input_data, node=None, input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % input_data + def gds_validate_integer(self, input_data, node=None, input_name=''): + return input_data + def gds_format_integer_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integers') + return values + def gds_format_float(self, input_data, input_name=''): + return ('%.15f' % input_data).rstrip('0') + def gds_validate_float(self, input_data, node=None, input_name=''): + return input_data + def gds_format_float_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of floats') + return values + def gds_format_double(self, input_data, input_name=''): + return '%e' % input_data + def gds_validate_double(self, input_data, node=None, input_name=''): + return input_data + def gds_format_double_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of doubles') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_validate_boolean(self, input_data, node=None, input_name=''): + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + if value not in ('true', '1', 'false', '0', ): + raise_parse_error( + node, + 'Requires sequence of booleans ' + '("true", "1", "false", "0")') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + 
input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + 
total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + return None + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + if type(self) != type(other): + return False + return self.__dict__ == other.__dict__ + def __ne__(self, other): + return not self.__eq__(other) + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. 
+# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. +# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(' ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." + if not inStr: + return '' + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s2 = '' + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos:mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start():mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + return s1 + + +def quote_attrib(inStr): + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s1 = s1.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', """) + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find('\n') == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find('\n') == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = '' + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(':') + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get('{%s}%s' % (namespace, name, )) + return value + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + def getCategory(self): + return self.category + def getContenttype(self, content_type): + return self.content_type + def getValue(self): + return self.value + def getName(self): + return self.name + def export(self, outfile, level, name, namespace, + pretty_print=True): + if self.category == 
MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def set_data_type(self, data_type): 
self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + +# +# Data representation classes. +# + + +class tpOpcaoSimples(object): + _0='0' + _1='1' + _2='2' + _3='3' + + +class tpStatusNFe(object): + N='N' + C='C' + E='E' + + +class tpTipoRPS(object): + RPS='RPS' + RPSM='RPS-M' + RPSC='RPS-C' + + +class RetornoConsultaInformacoesLoteNFTS(GeneratedsSuper): + """Schema utilizado para RETORNO de Pedidos de Informações de Lote de + NFTS.Este Schema XML é utilizado pelo Web Service para informar + aos tomadores/intermediários de serviços o resultado do pedido + de informações de lote de NFTS.""" + subclass = None + superclass = None + def __init__(self, Cabecalho=None, ListaRetornoLote=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.Cabecalho = Cabecalho + if ListaRetornoLote is None: + self.ListaRetornoLote = [] + else: + self.ListaRetornoLote = ListaRetornoLote + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, RetornoConsultaInformacoesLoteNFTS) + if subclass is not None: + return subclass(*args_, **kwargs_) + if RetornoConsultaInformacoesLoteNFTS.subclass: + return RetornoConsultaInformacoesLoteNFTS.subclass(*args_, **kwargs_) + else: + return RetornoConsultaInformacoesLoteNFTS(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): + if ( + self.Cabecalho is not None or + self.ListaRetornoLote + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='RetornoConsultaInformacoesLoteNFTS', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('RetornoConsultaInformacoesLoteNFTS') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='RetornoConsultaInformacoesLoteNFTS') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='RetornoConsultaInformacoesLoteNFTS', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='RetornoConsultaInformacoesLoteNFTS'): + pass + def exportChildren(self, outfile, level, namespaceprefix_='', 
namespacedef_='', name_='RetornoConsultaInformacoesLoteNFTS', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Cabecalho is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sCabecalho>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Cabecalho), input_name='Cabecalho')), namespaceprefix_ , eol_)) + for ListaRetornoLote_ in self.ListaRetornoLote: + showIndent(outfile, level, pretty_print) + outfile.write('<%sListaRetornoLote>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(ListaRetornoLote_), input_name='ListaRetornoLote')), namespaceprefix_ , eol_)) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + pass + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'Cabecalho': + Cabecalho_ = child_.text + Cabecalho_ = self.gds_validate_string(Cabecalho_, node, 'Cabecalho') + self.Cabecalho = Cabecalho_ + elif nodeName_ == 'ListaRetornoLote': + ListaRetornoLote_ = child_.text + ListaRetornoLote_ = self.gds_validate_string(ListaRetornoLote_, node, 'ListaRetornoLote') + self.ListaRetornoLote.append(ListaRetornoLote_) +# end class RetornoConsultaInformacoesLoteNFTS + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + rootClass = GDSClassesMapping.get(tag) + if rootClass is None: + rootClass = globals().get(tag) + return tag, rootClass + + +def parse(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'RetornoConsultaInformacoesLoteNFTS' + rootClass = RetornoConsultaInformacoesLoteNFTS + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + doc = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='', + pretty_print=True) + return rootObj + + +def parseEtree(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'RetornoConsultaInformacoesLoteNFTS' + rootClass = RetornoConsultaInformacoesLoteNFTS + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + doc = None + mapping = {} + rootElement = rootObj.to_etree(None, name_=rootTag, mapping_=mapping) + reverse_mapping = rootObj.gds_reverse_node_mapping(mapping) + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(content) + sys.stdout.write('\n') + return rootObj, rootElement, mapping, reverse_mapping + + +def parseString(inString, silence=False): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. 
+ ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'RetornoConsultaInformacoesLoteNFTS' + rootClass = RetornoConsultaInformacoesLoteNFTS + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + return rootObj + + +def parseLiteral(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'RetornoConsultaInformacoesLoteNFTS' + rootClass = RetornoConsultaInformacoesLoteNFTS + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + doc = None + if not silence: + sys.stdout.write('#from RetornoConsultaInformacoesLoteNFTS_v01 import *\n\n') + sys.stdout.write('import RetornoConsultaInformacoesLoteNFTS_v01 as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + + +__all__ = [ + "RetornoConsultaInformacoesLoteNFTS" +] diff --git a/nfselib/paulistana/v02/RetornoConsultaNFTS_v01.py b/nfselib/paulistana/v02/RetornoConsultaNFTS_v01.py new file mode 100644 index 0000000..a6df11a --- /dev/null +++ b/nfselib/paulistana/v02/RetornoConsultaNFTS_v01.py @@ -0,0 +1,992 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Jul 22 06:40:30 2020 by generateDS.py version 2.30.15. +# Python 3.8.3 (default, May 17 2020, 18:15:42) [GCC 10.1.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('--no-collect-includes', '') +# ('--use-getter-setter', 'none') +# ('-f', '') +# ('-o', './paulistanalib/v02/RetornoConsultaNFTS_v01.py') +# +# Command line arguments: +# ./process_includes/RetornoConsultaNFTS_v01.xsd +# +# Command line: +# /home/mileo/Projects/oca12/bin/erpbrasil-edoc-gen-generate-python --no-namespace-defs --no-collect-includes --use-getter-setter="none" -f -o "./paulistanalib/v02/RetornoConsultaNFTS_v01.py" ./process_includes/RetornoConsultaNFTS_v01.xsd +# +# Current working directory (os.getcwd()): +# schemas +# + +import os +import sys +import re as re_ +import base64 +import datetime as datetime_ +import warnings as warnings_ +try: + from lxml import etree as etree_ +except ImportError: + from xml.etree import ElementTree as etree_ + + +Validate_simpletypes_ = True +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. 
+ try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. +# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ImportError: + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ImportError: + GenerateDSNamespaceTypePrefixes_ = {} + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. 
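+#
+# The fallback class defined just below provides the gds_* formatting and
+# parsing helpers available to the generated classes. As an illustration, a
+# commented round-trip of the xsd:dateTime helpers (hypothetical values;
+# run it only after this module has been imported, so that GeneratedsSuper
+# is defined):
+#
+## import datetime
+## s = GeneratedsSuper().gds_format_datetime(
+##     datetime.datetime(2020, 7, 22, 6, 40, 30))
+## assert s == '2020-07-22T06:40:30'
+## assert GeneratedsSuper.gds_parse_datetime(s) == \
+##     datetime.datetime(2020, 7, 22, 6, 40, 30)
+#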
+ +try: + from generatedssuper import GeneratedsSuper +except ImportError as exp: + + class GeneratedsSuper(object): + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data) + def gds_validate_base64(self, input_data, node=None, input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % input_data + def gds_validate_integer(self, input_data, node=None, input_name=''): + return input_data + def gds_format_integer_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integers') + return values + def gds_format_float(self, input_data, input_name=''): + return ('%.15f' % input_data).rstrip('0') + def gds_validate_float(self, input_data, node=None, input_name=''): + return input_data + def gds_format_float_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of floats') + return values + def gds_format_double(self, input_data, input_name=''): + return '%e' % input_data + def gds_validate_double(self, input_data, node=None, input_name=''): + return input_data + def gds_format_double_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of doubles') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_validate_boolean(self, input_data, node=None, input_name=''): + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + if value not in ('true', '1', 'false', '0', ): + raise_parse_error( + node, + 'Requires sequence of booleans ' + '("true", "1", "false", "0")') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + 
input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + 
total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + return None + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + if type(self) != type(other): + return False + return self.__dict__ == other.__dict__ + def __ne__(self, other): + return not self.__eq__(other) + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. 
+# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. +# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(' ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." + if not inStr: + return '' + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s2 = '' + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos:mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start():mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + return s1 + + +def quote_attrib(inStr): + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s1 = s1.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', """) + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find('\n') == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find('\n') == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = '' + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(':') + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get('{%s}%s' % (namespace, name, )) + return value + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + def getCategory(self): + return self.category + def getContenttype(self, content_type): + return self.content_type + def getValue(self): + return self.value + def getName(self): + return self.name + def export(self, outfile, level, name, namespace, + pretty_print=True): + if self.category == 
MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def set_data_type(self, data_type): 
self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + +# +# Data representation classes. +# + + +class tpOpcaoSimples(object): + _0='0' + _1='1' + _2='2' + _3='3' + + +class tpStatusNFe(object): + N='N' + C='C' + E='E' + + +class tpTipoRPS(object): + RPS='RPS' + RPSM='RPS-M' + RPSC='RPS-C' + + +class RetornoConsultaNFTS(GeneratedsSuper): + """Schema utilizado para RETORNO de pedidos de consulta de NFTS, + consulta de NFTS recebidas e consulta de lote de NFTS.Este + Schema XML é utilizado pelo Web Service para informar aos + tomadores e/ou intermediários de serviços o resultado de pedidos + de consulta de NFTS, consulta de NFTS recebidas e consulta de + lote de NFTS.""" + subclass = None + superclass = None + def __init__(self, Cabecalho=None, RetornoConsultaLoteNFTS=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.Cabecalho = Cabecalho + if RetornoConsultaLoteNFTS is None: + self.RetornoConsultaLoteNFTS = [] + else: + self.RetornoConsultaLoteNFTS = RetornoConsultaLoteNFTS + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, RetornoConsultaNFTS) + if subclass is not None: + return subclass(*args_, **kwargs_) + if RetornoConsultaNFTS.subclass: + return RetornoConsultaNFTS.subclass(*args_, **kwargs_) + else: + return RetornoConsultaNFTS(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): + if ( + self.Cabecalho is not None or + self.RetornoConsultaLoteNFTS + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='RetornoConsultaNFTS', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('RetornoConsultaNFTS') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='RetornoConsultaNFTS') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='RetornoConsultaNFTS', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='RetornoConsultaNFTS'): + pass + def exportChildren(self, outfile, level, namespaceprefix_='', 
namespacedef_='', name_='RetornoConsultaNFTS', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Cabecalho is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sCabecalho>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Cabecalho), input_name='Cabecalho')), namespaceprefix_ , eol_)) + for RetornoConsultaLoteNFTS_ in self.RetornoConsultaLoteNFTS: + showIndent(outfile, level, pretty_print) + outfile.write('<%sRetornoConsultaLoteNFTS>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(RetornoConsultaLoteNFTS_), input_name='RetornoConsultaLoteNFTS')), namespaceprefix_ , eol_)) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + pass + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'Cabecalho': + Cabecalho_ = child_.text + Cabecalho_ = self.gds_validate_string(Cabecalho_, node, 'Cabecalho') + self.Cabecalho = Cabecalho_ + elif nodeName_ == 'RetornoConsultaLoteNFTS': + RetornoConsultaLoteNFTS_ = child_.text + RetornoConsultaLoteNFTS_ = self.gds_validate_string(RetornoConsultaLoteNFTS_, node, 'RetornoConsultaLoteNFTS') + self.RetornoConsultaLoteNFTS.append(RetornoConsultaLoteNFTS_) +# end class RetornoConsultaNFTS + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + rootClass = GDSClassesMapping.get(tag) + if rootClass is None: + rootClass = globals().get(tag) + return tag, rootClass + + +def parse(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'RetornoConsultaNFTS' + rootClass = RetornoConsultaNFTS + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + doc = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='', + pretty_print=True) + return rootObj + + +def parseEtree(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'RetornoConsultaNFTS' + rootClass = RetornoConsultaNFTS + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + doc = None + mapping = {} + rootElement = rootObj.to_etree(None, name_=rootTag, mapping_=mapping) + reverse_mapping = rootObj.gds_reverse_node_mapping(mapping) + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(content) + sys.stdout.write('\n') + return rootObj, rootElement, mapping, reverse_mapping + + +def parseString(inString, silence=False): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. 
+ ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'RetornoConsultaNFTS' + rootClass = RetornoConsultaNFTS + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + return rootObj + + +def parseLiteral(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'RetornoConsultaNFTS' + rootClass = RetornoConsultaNFTS + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + doc = None + if not silence: + sys.stdout.write('#from RetornoConsultaNFTS_v01 import *\n\n') + sys.stdout.write('import RetornoConsultaNFTS_v01 as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + + +__all__ = [ + "RetornoConsultaNFTS" +] diff --git a/nfselib/paulistana/v02/RetornoConsulta_v01.py b/nfselib/paulistana/v02/RetornoConsulta_v01.py new file mode 100644 index 0000000..b84a3aa --- /dev/null +++ b/nfselib/paulistana/v02/RetornoConsulta_v01.py @@ -0,0 +1,1077 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Jul 22 06:40:30 2020 by generateDS.py version 2.30.15. +# Python 3.8.3 (default, May 17 2020, 18:15:42) [GCC 10.1.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('--no-collect-includes', '') +# ('--use-getter-setter', 'none') +# ('-f', '') +# ('-o', './paulistanalib/v02/RetornoConsulta_v01.py') +# +# Command line arguments: +# ./process_includes/RetornoConsulta_v01.xsd +# +# Command line: +# /home/mileo/Projects/oca12/bin/erpbrasil-edoc-gen-generate-python --no-namespace-defs --no-collect-includes --use-getter-setter="none" -f -o "./paulistanalib/v02/RetornoConsulta_v01.py" ./process_includes/RetornoConsulta_v01.xsd +# +# Current working directory (os.getcwd()): +# schemas +# + +import os +import sys +import re as re_ +import base64 +import datetime as datetime_ +import warnings as warnings_ +try: + from lxml import etree as etree_ +except ImportError: + from xml.etree import ElementTree as etree_ + + +Validate_simpletypes_ = True +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. 
+ try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. +# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ImportError: + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ImportError: + GenerateDSNamespaceTypePrefixes_ = {} + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. 
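+#
+# A minimal sketch of what such an override could look like (hypothetical
+# example, not part of the generated output; a replacement class must still
+# provide every gds_* helper that the generated classes call):
+#
+# # File: generatedssuper.py
+#
+# class GeneratedsSuper(object):
+#     # Example override: strip surrounding whitespace while validating.
+#     def gds_validate_string(self, input_data, node=None, input_name=''):
+#         return (input_data or '').strip()
+#     def gds_format_string(self, input_data, input_name=''):
+#         return input_data
+#     # ... re-implement (or copy from the fallback class below) the
+#     # remaining gds_* formatting/validation/parsing helpers ...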
+ +try: + from generatedssuper import GeneratedsSuper +except ImportError as exp: + + class GeneratedsSuper(object): + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data) + def gds_validate_base64(self, input_data, node=None, input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % input_data + def gds_validate_integer(self, input_data, node=None, input_name=''): + return input_data + def gds_format_integer_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integers') + return values + def gds_format_float(self, input_data, input_name=''): + return ('%.15f' % input_data).rstrip('0') + def gds_validate_float(self, input_data, node=None, input_name=''): + return input_data + def gds_format_float_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of floats') + return values + def gds_format_double(self, input_data, input_name=''): + return '%e' % input_data + def gds_validate_double(self, input_data, node=None, input_name=''): + return input_data + def gds_format_double_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of doubles') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_validate_boolean(self, input_data, node=None, input_name=''): + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + if value not in ('true', '1', 'false', '0', ): + raise_parse_error( + node, + 'Requires sequence of booleans ' + '("true", "1", "false", "0")') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + 
input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + 
total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + return None + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + if type(self) != type(other): + return False + return self.__dict__ == other.__dict__ + def __ne__(self, other): + return not self.__eq__(other) + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. 
+# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. +# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(' ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." + if not inStr: + return '' + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s2 = '' + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos:mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start():mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + return s1 + + +def quote_attrib(inStr): + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s1 = s1.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', """) + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find('\n') == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find('\n') == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = '' + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(':') + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get('{%s}%s' % (namespace, name, )) + return value + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + def getCategory(self): + return self.category + def getContenttype(self, content_type): + return self.content_type + def getValue(self): + return self.value + def getName(self): + return self.name + def export(self, outfile, level, name, namespace, + pretty_print=True): + if self.category == 
MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def set_data_type(self, data_type): 
self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + +# +# Data representation classes. +# + + +class RetornoConsulta(GeneratedsSuper): + """Schema utilizado para RETORNO de pedidos de consulta de NFS-e/RPS, + consultade NFS-e recebidas e consulta de lote.Este Schema XML é + utilizado pelo Web Service para informar aos tomadores e/ou + prestadores de serviços o resultado de pedidos de consulta de + NFS-e/RPS, consultade NFS-e recebidas e consulta de lote.""" + subclass = None + superclass = None + def __init__(self, Cabecalho=None, Alerta=None, Erro=None, NFe=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.Cabecalho = Cabecalho + if Alerta is None: + self.Alerta = [] + else: + self.Alerta = Alerta + if Erro is None: + self.Erro = [] + else: + self.Erro = Erro + if NFe is None: + self.NFe = [] + else: + self.NFe = NFe + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, RetornoConsulta) + if subclass is not None: + return subclass(*args_, **kwargs_) + if RetornoConsulta.subclass: + return RetornoConsulta.subclass(*args_, **kwargs_) + else: + return RetornoConsulta(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): + if ( + self.Cabecalho is not None or + self.Alerta or + self.Erro or + self.NFe + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='RetornoConsulta', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('RetornoConsulta') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='RetornoConsulta') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='RetornoConsulta', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='RetornoConsulta'): + pass + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='RetornoConsulta', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Cabecalho is not None: + 
self.Cabecalho.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Cabecalho', pretty_print=pretty_print) + for Alerta_ in self.Alerta: + showIndent(outfile, level, pretty_print) + outfile.write('<%sAlerta>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(Alerta_), input_name='Alerta')), namespaceprefix_ , eol_)) + for Erro_ in self.Erro: + showIndent(outfile, level, pretty_print) + outfile.write('<%sErro>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(Erro_), input_name='Erro')), namespaceprefix_ , eol_)) + for NFe_ in self.NFe: + showIndent(outfile, level, pretty_print) + outfile.write('<%sNFe>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(NFe_), input_name='NFe')), namespaceprefix_ , eol_)) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + pass + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'Cabecalho': + obj_ = CabecalhoType.factory(parent_object_=self) + obj_.build(child_) + self.Cabecalho = obj_ + obj_.original_tagname_ = 'Cabecalho' + elif nodeName_ == 'Alerta': + Alerta_ = child_.text + Alerta_ = self.gds_validate_string(Alerta_, node, 'Alerta') + self.Alerta.append(Alerta_) + elif nodeName_ == 'Erro': + Erro_ = child_.text + Erro_ = self.gds_validate_string(Erro_, node, 'Erro') + self.Erro.append(Erro_) + elif nodeName_ == 'NFe': + NFe_ = child_.text + NFe_ = self.gds_validate_string(NFe_, node, 'NFe') + self.NFe.append(NFe_) +# end class RetornoConsulta + + +class CabecalhoType(GeneratedsSuper): + """Cabeçalho do retorno.Versão do Schema XML utilizado.""" + subclass = None + superclass = None + def __init__(self, Versao='1', Sucesso=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.Versao = _cast(None, Versao) + self.Sucesso = Sucesso + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, CabecalhoType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if CabecalhoType.subclass: + return CabecalhoType.subclass(*args_, **kwargs_) + else: + return CabecalhoType(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): + if ( + self.Sucesso is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CabecalhoType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('CabecalhoType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CabecalhoType') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='CabecalhoType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, 
eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='CabecalhoType'): + if self.Versao != "1" and 'Versao' not in already_processed: + already_processed.add('Versao') + outfile.write(' Versao=%s' % (quote_attrib(self.Versao), )) + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CabecalhoType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Sucesso is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sSucesso>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Sucesso), input_name='Sucesso')), namespaceprefix_ , eol_)) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('Versao', node) + if value is not None and 'Versao' not in already_processed: + already_processed.add('Versao') + self.Versao = value + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'Sucesso': + Sucesso_ = child_.text + Sucesso_ = self.gds_validate_string(Sucesso_, node, 'Sucesso') + self.Sucesso = Sucesso_ +# end class CabecalhoType + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + rootClass = GDSClassesMapping.get(tag) + if rootClass is None: + rootClass = globals().get(tag) + return tag, rootClass + + +def parse(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'RetornoConsulta' + rootClass = RetornoConsulta + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + doc = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='', + pretty_print=True) + return rootObj + + +def parseEtree(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'RetornoConsulta' + rootClass = RetornoConsulta + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + doc = None + mapping = {} + rootElement = rootObj.to_etree(None, name_=rootTag, mapping_=mapping) + reverse_mapping = rootObj.gds_reverse_node_mapping(mapping) + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(content) + sys.stdout.write('\n') + return rootObj, rootElement, mapping, reverse_mapping + + +def parseString(inString, silence=False): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. 
+ ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'RetornoConsulta' + rootClass = RetornoConsulta + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + return rootObj + + +def parseLiteral(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'RetornoConsulta' + rootClass = RetornoConsulta + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + doc = None + if not silence: + sys.stdout.write('#from RetornoConsulta_v01 import *\n\n') + sys.stdout.write('import RetornoConsulta_v01 as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + + +__all__ = [ + "CabecalhoType", + "RetornoConsulta" +] diff --git a/nfselib/paulistana/v02/RetornoEnvioLoteNFTS_v01.py b/nfselib/paulistana/v02/RetornoEnvioLoteNFTS_v01.py new file mode 100644 index 0000000..6b865bb --- /dev/null +++ b/nfselib/paulistana/v02/RetornoEnvioLoteNFTS_v01.py @@ -0,0 +1,987 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Jul 22 06:40:30 2020 by generateDS.py version 2.30.15. +# Python 3.8.3 (default, May 17 2020, 18:15:42) [GCC 10.1.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('--no-collect-includes', '') +# ('--use-getter-setter', 'none') +# ('-f', '') +# ('-o', './paulistanalib/v02/RetornoEnvioLoteNFTS_v01.py') +# +# Command line arguments: +# ./process_includes/RetornoEnvioLoteNFTS_v01.xsd +# +# Command line: +# /home/mileo/Projects/oca12/bin/erpbrasil-edoc-gen-generate-python --no-namespace-defs --no-collect-includes --use-getter-setter="none" -f -o "./paulistanalib/v02/RetornoEnvioLoteNFTS_v01.py" ./process_includes/RetornoEnvioLoteNFTS_v01.xsd +# +# Current working directory (os.getcwd()): +# schemas +# + +import os +import sys +import re as re_ +import base64 +import datetime as datetime_ +import warnings as warnings_ +try: + from lxml import etree as etree_ +except ImportError: + from xml.etree import ElementTree as etree_ + + +Validate_simpletypes_ = True +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. 
+ try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. +# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ImportError: + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ImportError: + GenerateDSNamespaceTypePrefixes_ = {} + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. 
+ +try: + from generatedssuper import GeneratedsSuper +except ImportError as exp: + + class GeneratedsSuper(object): + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data) + def gds_validate_base64(self, input_data, node=None, input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % input_data + def gds_validate_integer(self, input_data, node=None, input_name=''): + return input_data + def gds_format_integer_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integers') + return values + def gds_format_float(self, input_data, input_name=''): + return ('%.15f' % input_data).rstrip('0') + def gds_validate_float(self, input_data, node=None, input_name=''): + return input_data + def gds_format_float_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of floats') + return values + def gds_format_double(self, input_data, input_name=''): + return '%e' % input_data + def gds_validate_double(self, input_data, node=None, input_name=''): + return input_data + def gds_format_double_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of doubles') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_validate_boolean(self, input_data, node=None, input_name=''): + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + if value not in ('true', '1', 'false', '0', ): + raise_parse_error( + node, + 'Requires sequence of booleans ' + '("true", "1", "false", "0")') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + 
input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + 
total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + return None + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + if type(self) != type(other): + return False + return self.__dict__ == other.__dict__ + def __ne__(self, other): + return not self.__eq__(other) + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. 
+# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. +# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(' ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." + if not inStr: + return '' + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s2 = '' + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos:mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start():mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + return s1 + + +def quote_attrib(inStr): + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s1 = s1.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', """) + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find('\n') == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find('\n') == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = '' + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(':') + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get('{%s}%s' % (namespace, name, )) + return value + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + def getCategory(self): + return self.category + def getContenttype(self, content_type): + return self.content_type + def getValue(self): + return self.value + def getName(self): + return self.name + def export(self, outfile, level, name, namespace, + pretty_print=True): + if self.category == 
MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def set_data_type(self, data_type): 
self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + +# +# Data representation classes. +# + + +class tpOpcaoSimples(object): + _0='0' + _1='1' + _2='2' + _3='3' + + +class tpStatusNFe(object): + N='N' + C='C' + E='E' + + +class tpTipoRPS(object): + RPS='RPS' + RPSM='RPS-M' + RPSC='RPS-C' + + +class RetornoEnvioLoteNFTS(GeneratedsSuper): + """Schema utilizado para Retorno de envio de lote de NFTS.Este Schema + XML é utilizado para informar os tomadores/intermediários de + serviços o resultado do pedido do envio do lote da emissão de + NFTS.""" + subclass = None + superclass = None + def __init__(self, Cabecalho=None, ListaRetornoLote=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.Cabecalho = Cabecalho + self.ListaRetornoLote = ListaRetornoLote + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, RetornoEnvioLoteNFTS) + if subclass is not None: + return subclass(*args_, **kwargs_) + if RetornoEnvioLoteNFTS.subclass: + return RetornoEnvioLoteNFTS.subclass(*args_, **kwargs_) + else: + return RetornoEnvioLoteNFTS(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): + if ( + self.Cabecalho is not None or + self.ListaRetornoLote is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='RetornoEnvioLoteNFTS', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('RetornoEnvioLoteNFTS') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='RetornoEnvioLoteNFTS') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='RetornoEnvioLoteNFTS', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='RetornoEnvioLoteNFTS'): + pass + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='RetornoEnvioLoteNFTS', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Cabecalho is not None: + showIndent(outfile, level, pretty_print) + 
outfile.write('<%sCabecalho>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Cabecalho), input_name='Cabecalho')), namespaceprefix_ , eol_)) + if self.ListaRetornoLote is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sListaRetornoLote>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ListaRetornoLote), input_name='ListaRetornoLote')), namespaceprefix_ , eol_)) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + pass + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'Cabecalho': + Cabecalho_ = child_.text + Cabecalho_ = self.gds_validate_string(Cabecalho_, node, 'Cabecalho') + self.Cabecalho = Cabecalho_ + elif nodeName_ == 'ListaRetornoLote': + ListaRetornoLote_ = child_.text + ListaRetornoLote_ = self.gds_validate_string(ListaRetornoLote_, node, 'ListaRetornoLote') + self.ListaRetornoLote = ListaRetornoLote_ +# end class RetornoEnvioLoteNFTS + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + rootClass = GDSClassesMapping.get(tag) + if rootClass is None: + rootClass = globals().get(tag) + return tag, rootClass + + +def parse(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'RetornoEnvioLoteNFTS' + rootClass = RetornoEnvioLoteNFTS + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + doc = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='', + pretty_print=True) + return rootObj + + +def parseEtree(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'RetornoEnvioLoteNFTS' + rootClass = RetornoEnvioLoteNFTS + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + doc = None + mapping = {} + rootElement = rootObj.to_etree(None, name_=rootTag, mapping_=mapping) + reverse_mapping = rootObj.gds_reverse_node_mapping(mapping) + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(content) + sys.stdout.write('\n') + return rootObj, rootElement, mapping, reverse_mapping + + +def parseString(inString, silence=False): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. 
+ ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'RetornoEnvioLoteNFTS' + rootClass = RetornoEnvioLoteNFTS + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + return rootObj + + +def parseLiteral(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'RetornoEnvioLoteNFTS' + rootClass = RetornoEnvioLoteNFTS + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + doc = None + if not silence: + sys.stdout.write('#from RetornoEnvioLoteNFTS_v01 import *\n\n') + sys.stdout.write('import RetornoEnvioLoteNFTS_v01 as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + + +__all__ = [ + "RetornoEnvioLoteNFTS" +] diff --git a/nfselib/paulistana/v02/RetornoEnvioLoteRPS_v01.py b/nfselib/paulistana/v02/RetornoEnvioLoteRPS_v01.py new file mode 100644 index 0000000..8313d0a --- /dev/null +++ b/nfselib/paulistana/v02/RetornoEnvioLoteRPS_v01.py @@ -0,0 +1,1085 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Jul 22 06:40:30 2020 by generateDS.py version 2.30.15. +# Python 3.8.3 (default, May 17 2020, 18:15:42) [GCC 10.1.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('--no-collect-includes', '') +# ('--use-getter-setter', 'none') +# ('-f', '') +# ('-o', './paulistanalib/v02/RetornoEnvioLoteRPS_v01.py') +# +# Command line arguments: +# ./process_includes/RetornoEnvioLoteRPS_v01.xsd +# +# Command line: +# /home/mileo/Projects/oca12/bin/erpbrasil-edoc-gen-generate-python --no-namespace-defs --no-collect-includes --use-getter-setter="none" -f -o "./paulistanalib/v02/RetornoEnvioLoteRPS_v01.py" ./process_includes/RetornoEnvioLoteRPS_v01.xsd +# +# Current working directory (os.getcwd()): +# schemas +# + +import os +import sys +import re as re_ +import base64 +import datetime as datetime_ +import warnings as warnings_ +try: + from lxml import etree as etree_ +except ImportError: + from xml.etree import ElementTree as etree_ + + +Validate_simpletypes_ = True +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. 
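+        # Note: only this parsing step has an xml.etree fallback; other
+        # helpers in this module (node.nsmap, getparent(), sourceline)
+        # assume lxml is available.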
+ try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. +# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ImportError: + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ImportError: + GenerateDSNamespaceTypePrefixes_ = {} + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. 
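+# A minimal sketch of such an override module (the method body shown is
+# only an illustration, not part of the generated defaults): the import
+# below replaces the entire fallback class, so a generatedssuper.py
+# module must expose every gds_* hook the generated classes call. In
+# practice you copy the fallback GeneratedsSuper defined below into that
+# module and change only the methods whose behaviour you want to adjust:
+#
+# # File: generatedssuper.py
+#
+# class GeneratedsSuper(object):
+#     # ... all other gds_* helpers copied unchanged from the fallback ...
+#     def gds_format_string(self, input_data, input_name=''):
+#         # hypothetical override: strip stray whitespace before export
+#         return input_data.strip()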
+ +try: + from generatedssuper import GeneratedsSuper +except ImportError as exp: + + class GeneratedsSuper(object): + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data) + def gds_validate_base64(self, input_data, node=None, input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % input_data + def gds_validate_integer(self, input_data, node=None, input_name=''): + return input_data + def gds_format_integer_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integers') + return values + def gds_format_float(self, input_data, input_name=''): + return ('%.15f' % input_data).rstrip('0') + def gds_validate_float(self, input_data, node=None, input_name=''): + return input_data + def gds_format_float_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of floats') + return values + def gds_format_double(self, input_data, input_name=''): + return '%e' % input_data + def gds_validate_double(self, input_data, node=None, input_name=''): + return input_data + def gds_format_double_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of doubles') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_validate_boolean(self, input_data, node=None, input_name=''): + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + if value not in ('true', '1', 'false', '0', ): + raise_parse_error( + node, + 'Requires sequence of booleans ' + '("true", "1", "false", "0")') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + 
input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + 
total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + return None + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + if type(self) != type(other): + return False + return self.__dict__ == other.__dict__ + def __ne__(self, other): + return not self.__eq__(other) + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. 
+# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. +# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(' ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." + if not inStr: + return '' + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s2 = '' + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos:mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start():mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + return s1 + + +def quote_attrib(inStr): + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s1 = s1.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', """) + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find('\n') == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find('\n') == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = '' + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(':') + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get('{%s}%s' % (namespace, name, )) + return value + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + def getCategory(self): + return self.category + def getContenttype(self, content_type): + return self.content_type + def getValue(self): + return self.value + def getName(self): + return self.name + def export(self, outfile, level, name, namespace, + pretty_print=True): + if self.category == 
MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def set_data_type(self, data_type): 
self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + +# +# Data representation classes. +# + + +class RetornoEnvioLoteRPS(GeneratedsSuper): + """Schema utilizado para RETORNO de Pedidos de Envio de lote de + RPS.Este Schema XML é utilizado pelo Web Service para informar + aos prestadores de serviços o resultado do pedido de envio de + lote de RPS.""" + subclass = None + superclass = None + def __init__(self, Cabecalho=None, Alerta=None, Erro=None, ChaveNFeRPS=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.Cabecalho = Cabecalho + if Alerta is None: + self.Alerta = [] + else: + self.Alerta = Alerta + if Erro is None: + self.Erro = [] + else: + self.Erro = Erro + if ChaveNFeRPS is None: + self.ChaveNFeRPS = [] + else: + self.ChaveNFeRPS = ChaveNFeRPS + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, RetornoEnvioLoteRPS) + if subclass is not None: + return subclass(*args_, **kwargs_) + if RetornoEnvioLoteRPS.subclass: + return RetornoEnvioLoteRPS.subclass(*args_, **kwargs_) + else: + return RetornoEnvioLoteRPS(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): + if ( + self.Cabecalho is not None or + self.Alerta or + self.Erro or + self.ChaveNFeRPS + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='RetornoEnvioLoteRPS', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('RetornoEnvioLoteRPS') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='RetornoEnvioLoteRPS') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='RetornoEnvioLoteRPS', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='RetornoEnvioLoteRPS'): + pass + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='RetornoEnvioLoteRPS', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Cabecalho is not None: + self.Cabecalho.export(outfile, level, 
namespaceprefix_, namespacedef_='', name_='Cabecalho', pretty_print=pretty_print) + for Alerta_ in self.Alerta: + showIndent(outfile, level, pretty_print) + outfile.write('<%sAlerta>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(Alerta_), input_name='Alerta')), namespaceprefix_ , eol_)) + for Erro_ in self.Erro: + showIndent(outfile, level, pretty_print) + outfile.write('<%sErro>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(Erro_), input_name='Erro')), namespaceprefix_ , eol_)) + for ChaveNFeRPS_ in self.ChaveNFeRPS: + showIndent(outfile, level, pretty_print) + outfile.write('<%sChaveNFeRPS>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(ChaveNFeRPS_), input_name='ChaveNFeRPS')), namespaceprefix_ , eol_)) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + pass + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'Cabecalho': + obj_ = CabecalhoType.factory(parent_object_=self) + obj_.build(child_) + self.Cabecalho = obj_ + obj_.original_tagname_ = 'Cabecalho' + elif nodeName_ == 'Alerta': + Alerta_ = child_.text + Alerta_ = self.gds_validate_string(Alerta_, node, 'Alerta') + self.Alerta.append(Alerta_) + elif nodeName_ == 'Erro': + Erro_ = child_.text + Erro_ = self.gds_validate_string(Erro_, node, 'Erro') + self.Erro.append(Erro_) + elif nodeName_ == 'ChaveNFeRPS': + ChaveNFeRPS_ = child_.text + ChaveNFeRPS_ = self.gds_validate_string(ChaveNFeRPS_, node, 'ChaveNFeRPS') + self.ChaveNFeRPS.append(ChaveNFeRPS_) +# end class RetornoEnvioLoteRPS + + +class CabecalhoType(GeneratedsSuper): + """Cabeçalho do retorno.Versão do Schema XML utilizado.""" + subclass = None + superclass = None + def __init__(self, Versao='1', Sucesso=None, InformacoesLote=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.Versao = _cast(None, Versao) + self.Sucesso = Sucesso + self.InformacoesLote = InformacoesLote + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, CabecalhoType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if CabecalhoType.subclass: + return CabecalhoType.subclass(*args_, **kwargs_) + else: + return CabecalhoType(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): + if ( + self.Sucesso is not None or + self.InformacoesLote is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CabecalhoType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('CabecalhoType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CabecalhoType') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, 
namespaceprefix_, namespacedef_, name_='CabecalhoType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='CabecalhoType'): + if self.Versao != "1" and 'Versao' not in already_processed: + already_processed.add('Versao') + outfile.write(' Versao=%s' % (quote_attrib(self.Versao), )) + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CabecalhoType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Sucesso is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sSucesso>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Sucesso), input_name='Sucesso')), namespaceprefix_ , eol_)) + if self.InformacoesLote is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sInformacoesLote>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.InformacoesLote), input_name='InformacoesLote')), namespaceprefix_ , eol_)) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('Versao', node) + if value is not None and 'Versao' not in already_processed: + already_processed.add('Versao') + self.Versao = value + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'Sucesso': + Sucesso_ = child_.text + Sucesso_ = self.gds_validate_string(Sucesso_, node, 'Sucesso') + self.Sucesso = Sucesso_ + elif nodeName_ == 'InformacoesLote': + InformacoesLote_ = child_.text + InformacoesLote_ = self.gds_validate_string(InformacoesLote_, node, 'InformacoesLote') + self.InformacoesLote = InformacoesLote_ +# end class CabecalhoType + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + rootClass = GDSClassesMapping.get(tag) + if rootClass is None: + rootClass = globals().get(tag) + return tag, rootClass + + +def parse(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'RetornoEnvioLoteRPS' + rootClass = RetornoEnvioLoteRPS + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + doc = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='', + pretty_print=True) + return rootObj + + +def parseEtree(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'RetornoEnvioLoteRPS' + rootClass = RetornoEnvioLoteRPS + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. 
+ doc = None + mapping = {} + rootElement = rootObj.to_etree(None, name_=rootTag, mapping_=mapping) + reverse_mapping = rootObj.gds_reverse_node_mapping(mapping) + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(content) + sys.stdout.write('\n') + return rootObj, rootElement, mapping, reverse_mapping + + +def parseString(inString, silence=False): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. + ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'RetornoEnvioLoteRPS' + rootClass = RetornoEnvioLoteRPS + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + return rootObj + + +def parseLiteral(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'RetornoEnvioLoteRPS' + rootClass = RetornoEnvioLoteRPS + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + doc = None + if not silence: + sys.stdout.write('#from RetornoEnvioLoteRPS_v01 import *\n\n') + sys.stdout.write('import RetornoEnvioLoteRPS_v01 as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + + +__all__ = [ + "CabecalhoType", + "RetornoEnvioLoteRPS" +] diff --git a/nfselib/paulistana/v02/RetornoEnvioNFTS_v01.py b/nfselib/paulistana/v02/RetornoEnvioNFTS_v01.py new file mode 100644 index 0000000..9c0fcff --- /dev/null +++ b/nfselib/paulistana/v02/RetornoEnvioNFTS_v01.py @@ -0,0 +1,999 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Jul 22 06:40:30 2020 by generateDS.py version 2.30.15. 
+# Python 3.8.3 (default, May 17 2020, 18:15:42) [GCC 10.1.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('--no-collect-includes', '') +# ('--use-getter-setter', 'none') +# ('-f', '') +# ('-o', './paulistanalib/v02/RetornoEnvioNFTS_v01.py') +# +# Command line arguments: +# ./process_includes/RetornoEnvioNFTS_v01.xsd +# +# Command line: +# /home/mileo/Projects/oca12/bin/erpbrasil-edoc-gen-generate-python --no-namespace-defs --no-collect-includes --use-getter-setter="none" -f -o "./paulistanalib/v02/RetornoEnvioNFTS_v01.py" ./process_includes/RetornoEnvioNFTS_v01.xsd +# +# Current working directory (os.getcwd()): +# schemas +# + +import os +import sys +import re as re_ +import base64 +import datetime as datetime_ +import warnings as warnings_ +try: + from lxml import etree as etree_ +except ImportError: + from xml.etree import ElementTree as etree_ + + +Validate_simpletypes_ = True +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ImportError: + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ImportError: + GenerateDSNamespaceTypePrefixes_ = {} + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ImportError as exp: + + class GeneratedsSuper(object): + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data) + def gds_validate_base64(self, input_data, node=None, input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % input_data + def gds_validate_integer(self, input_data, node=None, input_name=''): + return input_data + def gds_format_integer_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integers') + return values + def gds_format_float(self, input_data, input_name=''): + return ('%.15f' % input_data).rstrip('0') + def gds_validate_float(self, input_data, node=None, input_name=''): + return input_data + def gds_format_float_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of floats') + return values + def gds_format_double(self, input_data, input_name=''): + return '%e' % input_data + def gds_validate_double(self, input_data, node=None, input_name=''): + return input_data + def gds_format_double_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of doubles') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_validate_boolean(self, input_data, node=None, input_name=''): + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, 
input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + if value not in ('true', '1', 'false', '0', ): + raise_parse_error( + node, + 'Requires sequence of booleans ' + '("true", "1", "false", "0")') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' 
+ time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. 
+ found1 = True + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + return None + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + if type(self) != type(other): + return False + return self.__dict__ == other.__dict__ + def __ne__(self, other): + return not self.__eq__(other) + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. 
+# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. +# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(' ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." + if not inStr: + return '' + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s2 = '' + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos:mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start():mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + return s1 + + +def quote_attrib(inStr): + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s1 = s1.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', """) + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find('\n') == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find('\n') == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = '' + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(':') + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get('{%s}%s' % (namespace, name, )) + return value + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + def getCategory(self): + return self.category + def getContenttype(self, content_type): + return self.content_type + def getValue(self): + return self.value + def getName(self): + return self.name + def export(self, outfile, level, name, namespace, + pretty_print=True): + if self.category == 
MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def set_data_type(self, data_type): 
self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + +# +# Data representation classes. +# + + +class tpOpcaoSimples(object): + _0='0' + _1='1' + _2='2' + _3='3' + + +class tpStatusNFe(object): + N='N' + C='C' + E='E' + + +class tpTipoRPS(object): + RPS='RPS' + RPSM='RPS-M' + RPSC='RPS-C' + + +class RetornoEnvioNFTS(GeneratedsSuper): + """Schema utilizado para Retorno de envio de lote de NFTS.Este Schema + XML é utilizado para informar os tomadores/intermediários de + serviços o resultado do pedido do envio do lote da emissão de + NFTS.""" + subclass = None + superclass = None + def __init__(self, Cabecalho=None, ListaRetornoNFTS=None, ChaveNFTS=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.Cabecalho = Cabecalho + if ListaRetornoNFTS is None: + self.ListaRetornoNFTS = [] + else: + self.ListaRetornoNFTS = ListaRetornoNFTS + self.ChaveNFTS = ChaveNFTS + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, RetornoEnvioNFTS) + if subclass is not None: + return subclass(*args_, **kwargs_) + if RetornoEnvioNFTS.subclass: + return RetornoEnvioNFTS.subclass(*args_, **kwargs_) + else: + return RetornoEnvioNFTS(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): + if ( + self.Cabecalho is not None or + self.ListaRetornoNFTS or + self.ChaveNFTS is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='RetornoEnvioNFTS', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('RetornoEnvioNFTS') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='RetornoEnvioNFTS') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='RetornoEnvioNFTS', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='RetornoEnvioNFTS'): + pass + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='RetornoEnvioNFTS', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + 
if self.Cabecalho is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sCabecalho>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Cabecalho), input_name='Cabecalho')), namespaceprefix_ , eol_)) + for ListaRetornoNFTS_ in self.ListaRetornoNFTS: + showIndent(outfile, level, pretty_print) + outfile.write('<%sListaRetornoNFTS>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(ListaRetornoNFTS_), input_name='ListaRetornoNFTS')), namespaceprefix_ , eol_)) + if self.ChaveNFTS is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sChaveNFTS>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ChaveNFTS), input_name='ChaveNFTS')), namespaceprefix_ , eol_)) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + pass + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'Cabecalho': + Cabecalho_ = child_.text + Cabecalho_ = self.gds_validate_string(Cabecalho_, node, 'Cabecalho') + self.Cabecalho = Cabecalho_ + elif nodeName_ == 'ListaRetornoNFTS': + ListaRetornoNFTS_ = child_.text + ListaRetornoNFTS_ = self.gds_validate_string(ListaRetornoNFTS_, node, 'ListaRetornoNFTS') + self.ListaRetornoNFTS.append(ListaRetornoNFTS_) + elif nodeName_ == 'ChaveNFTS': + ChaveNFTS_ = child_.text + ChaveNFTS_ = self.gds_validate_string(ChaveNFTS_, node, 'ChaveNFTS') + self.ChaveNFTS = ChaveNFTS_ +# end class RetornoEnvioNFTS + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + rootClass = GDSClassesMapping.get(tag) + if rootClass is None: + rootClass = globals().get(tag) + return tag, rootClass + + +def parse(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'RetornoEnvioNFTS' + rootClass = RetornoEnvioNFTS + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + doc = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='', + pretty_print=True) + return rootObj + + +def parseEtree(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'RetornoEnvioNFTS' + rootClass = RetornoEnvioNFTS + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + doc = None + mapping = {} + rootElement = rootObj.to_etree(None, name_=rootTag, mapping_=mapping) + reverse_mapping = rootObj.gds_reverse_node_mapping(mapping) + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(content) + sys.stdout.write('\n') + return rootObj, rootElement, mapping, reverse_mapping + + +def parseString(inString, silence=False): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. 
This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. + ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'RetornoEnvioNFTS' + rootClass = RetornoEnvioNFTS + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + return rootObj + + +def parseLiteral(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'RetornoEnvioNFTS' + rootClass = RetornoEnvioNFTS + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + doc = None + if not silence: + sys.stdout.write('#from RetornoEnvioNFTS_v01 import *\n\n') + sys.stdout.write('import RetornoEnvioNFTS_v01 as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + + +__all__ = [ + "RetornoEnvioNFTS" +] diff --git a/nfselib/paulistana/v02/RetornoEnvioRPS_v01.py b/nfselib/paulistana/v02/RetornoEnvioRPS_v01.py new file mode 100644 index 0000000..06b5480 --- /dev/null +++ b/nfselib/paulistana/v02/RetornoEnvioRPS_v01.py @@ -0,0 +1,1072 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Jul 22 06:40:30 2020 by generateDS.py version 2.30.15. +# Python 3.8.3 (default, May 17 2020, 18:15:42) [GCC 10.1.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('--no-collect-includes', '') +# ('--use-getter-setter', 'none') +# ('-f', '') +# ('-o', './paulistanalib/v02/RetornoEnvioRPS_v01.py') +# +# Command line arguments: +# ./process_includes/RetornoEnvioRPS_v01.xsd +# +# Command line: +# /home/mileo/Projects/oca12/bin/erpbrasil-edoc-gen-generate-python --no-namespace-defs --no-collect-includes --use-getter-setter="none" -f -o "./paulistanalib/v02/RetornoEnvioRPS_v01.py" ./process_includes/RetornoEnvioRPS_v01.xsd +# +# Current working directory (os.getcwd()): +# schemas +# + +import os +import sys +import re as re_ +import base64 +import datetime as datetime_ +import warnings as warnings_ +try: + from lxml import etree as etree_ +except ImportError: + from xml.etree import ElementTree as etree_ + + +Validate_simpletypes_ = True +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. 
+ try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. +# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ImportError: + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ImportError: + GenerateDSNamespaceTypePrefixes_ = {} + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. 
+ +try: + from generatedssuper import GeneratedsSuper +except ImportError as exp: + + class GeneratedsSuper(object): + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data) + def gds_validate_base64(self, input_data, node=None, input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % input_data + def gds_validate_integer(self, input_data, node=None, input_name=''): + return input_data + def gds_format_integer_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integers') + return values + def gds_format_float(self, input_data, input_name=''): + return ('%.15f' % input_data).rstrip('0') + def gds_validate_float(self, input_data, node=None, input_name=''): + return input_data + def gds_format_float_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of floats') + return values + def gds_format_double(self, input_data, input_name=''): + return '%e' % input_data + def gds_validate_double(self, input_data, node=None, input_name=''): + return input_data + def gds_format_double_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of doubles') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_validate_boolean(self, input_data, node=None, input_name=''): + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + if value not in ('true', '1', 'false', '0', ): + raise_parse_error( + node, + 'Requires sequence of booleans ' + '("true", "1", "false", "0")') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + 
input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + 
total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + return None + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + if type(self) != type(other): + return False + return self.__dict__ == other.__dict__ + def __ne__(self, other): + return not self.__eq__(other) + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. 
+# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. +# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(' ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." + if not inStr: + return '' + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s2 = '' + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos:mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start():mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + return s1 + + +def quote_attrib(inStr): + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s1 = s1.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', """) + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find('\n') == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find('\n') == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = '' + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(':') + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get('{%s}%s' % (namespace, name, )) + return value + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + def getCategory(self): + return self.category + def getContenttype(self, content_type): + return self.content_type + def getValue(self): + return self.value + def getName(self): + return self.name + def export(self, outfile, level, name, namespace, + pretty_print=True): + if self.category == 
MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def set_data_type(self, data_type): 
self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + +# +# Data representation classes. +# + + +class RetornoEnvioRPS(GeneratedsSuper): + """Schema utilizado para RETORNO de Pedidos de Envio de RPS.Este Schema + XML é utilizado pelo Web Service para informar aos prestadores + de serviços o resultado do pedido de envio de RPS.""" + subclass = None + superclass = None + def __init__(self, Cabecalho=None, Alerta=None, Erro=None, ChaveNFeRPS=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.Cabecalho = Cabecalho + if Alerta is None: + self.Alerta = [] + else: + self.Alerta = Alerta + if Erro is None: + self.Erro = [] + else: + self.Erro = Erro + self.ChaveNFeRPS = ChaveNFeRPS + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, RetornoEnvioRPS) + if subclass is not None: + return subclass(*args_, **kwargs_) + if RetornoEnvioRPS.subclass: + return RetornoEnvioRPS.subclass(*args_, **kwargs_) + else: + return RetornoEnvioRPS(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): + if ( + self.Cabecalho is not None or + self.Alerta or + self.Erro or + self.ChaveNFeRPS is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='RetornoEnvioRPS', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('RetornoEnvioRPS') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='RetornoEnvioRPS') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='RetornoEnvioRPS', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='RetornoEnvioRPS'): + pass + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='RetornoEnvioRPS', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Cabecalho is not None: + self.Cabecalho.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Cabecalho', pretty_print=pretty_print) + for Alerta_ in self.Alerta: + 
showIndent(outfile, level, pretty_print) + outfile.write('<%sAlerta>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(Alerta_), input_name='Alerta')), namespaceprefix_ , eol_)) + for Erro_ in self.Erro: + showIndent(outfile, level, pretty_print) + outfile.write('<%sErro>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(Erro_), input_name='Erro')), namespaceprefix_ , eol_)) + if self.ChaveNFeRPS is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sChaveNFeRPS>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ChaveNFeRPS), input_name='ChaveNFeRPS')), namespaceprefix_ , eol_)) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + pass + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'Cabecalho': + obj_ = CabecalhoType.factory(parent_object_=self) + obj_.build(child_) + self.Cabecalho = obj_ + obj_.original_tagname_ = 'Cabecalho' + elif nodeName_ == 'Alerta': + Alerta_ = child_.text + Alerta_ = self.gds_validate_string(Alerta_, node, 'Alerta') + self.Alerta.append(Alerta_) + elif nodeName_ == 'Erro': + Erro_ = child_.text + Erro_ = self.gds_validate_string(Erro_, node, 'Erro') + self.Erro.append(Erro_) + elif nodeName_ == 'ChaveNFeRPS': + ChaveNFeRPS_ = child_.text + ChaveNFeRPS_ = self.gds_validate_string(ChaveNFeRPS_, node, 'ChaveNFeRPS') + self.ChaveNFeRPS = ChaveNFeRPS_ +# end class RetornoEnvioRPS + + +class CabecalhoType(GeneratedsSuper): + """Cabeçalho do retorno.Versão do Schema XML utilizado.""" + subclass = None + superclass = None + def __init__(self, Versao='1', Sucesso=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.Versao = _cast(None, Versao) + self.Sucesso = Sucesso + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, CabecalhoType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if CabecalhoType.subclass: + return CabecalhoType.subclass(*args_, **kwargs_) + else: + return CabecalhoType(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): + if ( + self.Sucesso is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CabecalhoType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('CabecalhoType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CabecalhoType') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='CabecalhoType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def 
exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='CabecalhoType'): + if self.Versao != "1" and 'Versao' not in already_processed: + already_processed.add('Versao') + outfile.write(' Versao=%s' % (quote_attrib(self.Versao), )) + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CabecalhoType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Sucesso is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sSucesso>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Sucesso), input_name='Sucesso')), namespaceprefix_ , eol_)) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('Versao', node) + if value is not None and 'Versao' not in already_processed: + already_processed.add('Versao') + self.Versao = value + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'Sucesso': + Sucesso_ = child_.text + Sucesso_ = self.gds_validate_string(Sucesso_, node, 'Sucesso') + self.Sucesso = Sucesso_ +# end class CabecalhoType + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + rootClass = GDSClassesMapping.get(tag) + if rootClass is None: + rootClass = globals().get(tag) + return tag, rootClass + + +def parse(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'RetornoEnvioRPS' + rootClass = RetornoEnvioRPS + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + doc = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='', + pretty_print=True) + return rootObj + + +def parseEtree(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'RetornoEnvioRPS' + rootClass = RetornoEnvioRPS + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + doc = None + mapping = {} + rootElement = rootObj.to_etree(None, name_=rootTag, mapping_=mapping) + reverse_mapping = rootObj.gds_reverse_node_mapping(mapping) + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(content) + sys.stdout.write('\n') + return rootObj, rootElement, mapping, reverse_mapping + + +def parseString(inString, silence=False): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. 
+ ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'RetornoEnvioRPS' + rootClass = RetornoEnvioRPS + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + return rootObj + + +def parseLiteral(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'RetornoEnvioRPS' + rootClass = RetornoEnvioRPS + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + doc = None + if not silence: + sys.stdout.write('#from RetornoEnvioRPS_v01 import *\n\n') + sys.stdout.write('import RetornoEnvioRPS_v01 as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + + +__all__ = [ + "CabecalhoType", + "RetornoEnvioRPS" +] diff --git a/nfselib/paulistana/v02/RetornoInformacoesLote_v01.py b/nfselib/paulistana/v02/RetornoInformacoesLote_v01.py new file mode 100644 index 0000000..30dc809 --- /dev/null +++ b/nfselib/paulistana/v02/RetornoInformacoesLote_v01.py @@ -0,0 +1,1073 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Jul 22 06:40:30 2020 by generateDS.py version 2.30.15. +# Python 3.8.3 (default, May 17 2020, 18:15:42) [GCC 10.1.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('--no-collect-includes', '') +# ('--use-getter-setter', 'none') +# ('-f', '') +# ('-o', './paulistanalib/v02/RetornoInformacoesLote_v01.py') +# +# Command line arguments: +# ./process_includes/RetornoInformacoesLote_v01.xsd +# +# Command line: +# /home/mileo/Projects/oca12/bin/erpbrasil-edoc-gen-generate-python --no-namespace-defs --no-collect-includes --use-getter-setter="none" -f -o "./paulistanalib/v02/RetornoInformacoesLote_v01.py" ./process_includes/RetornoInformacoesLote_v01.xsd +# +# Current working directory (os.getcwd()): +# schemas +# + +import os +import sys +import re as re_ +import base64 +import datetime as datetime_ +import warnings as warnings_ +try: + from lxml import etree as etree_ +except ImportError: + from xml.etree import ElementTree as etree_ + + +Validate_simpletypes_ = True +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. 
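The RetornoEnvioRPS_v01 module that closes above ends with the usual generateDS entry points (parse, parseString, parseEtree, parseLiteral). A minimal sketch of reading a web-service reply with parseString; the import path follows the file layout of this patch, and the XML literal is a hand-written stand-in, not real output from the Prefeitura web service:

# Illustrative sketch only; the sample XML below is made up, not a real reply.
from nfselib.paulistana.v02 import RetornoEnvioRPS_v01

SAMPLE_REPLY = (
    '<RetornoEnvioRPS>'
    '<Cabecalho Versao="1"><Sucesso>true</Sucesso></Cabecalho>'
    '<ChaveNFeRPS>chave-ilustrativa</ChaveNFeRPS>'
    '</RetornoEnvioRPS>'
)

retorno = RetornoEnvioRPS_v01.parseString(SAMPLE_REPLY, silence=True)
print(retorno.Cabecalho.Sucesso)  # 'true'
print(retorno.ChaveNFeRPS)        # 'chave-ilustrativa'
print(retorno.Erro)               # [] -- Alerta and Erro are plain lists of strings here

With silence=True the helper only builds and returns the object tree; with the default silence=False it also re-exports the parsed tree to stdout, which is mainly useful for ad-hoc inspection.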
+ try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. +# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ImportError: + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ImportError: + GenerateDSNamespaceTypePrefixes_ = {} + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. 
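Both optional hooks described in the comment block above are plain modules looked up on the import path when this generated module is loaded. A sketch of the generatedsnamespaces side; the element name comes from this module, while the namespace URI is a placeholder rather than the official Prefeitura one:

# File: generatedsnamespaces.py -- illustrative placeholder values, assumed layout.
GenerateDSNamespaceDefs = {
    # export() splices this string verbatim (after a space) into the element's
    # opening tag, so a complete xmlns declaration is what belongs here.
    'RetornoInformacoesLote': 'xmlns="http://example.com/nfse/retorno"',
}

GenerateDSNamespaceTypePrefixes = {}

If the module is absent, the except ImportError branch above falls back to empty dictionaries and export() emits elements without namespace declarations, exactly as generated here.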
+ +try: + from generatedssuper import GeneratedsSuper +except ImportError as exp: + + class GeneratedsSuper(object): + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data) + def gds_validate_base64(self, input_data, node=None, input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % input_data + def gds_validate_integer(self, input_data, node=None, input_name=''): + return input_data + def gds_format_integer_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integers') + return values + def gds_format_float(self, input_data, input_name=''): + return ('%.15f' % input_data).rstrip('0') + def gds_validate_float(self, input_data, node=None, input_name=''): + return input_data + def gds_format_float_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of floats') + return values + def gds_format_double(self, input_data, input_name=''): + return '%e' % input_data + def gds_validate_double(self, input_data, node=None, input_name=''): + return input_data + def gds_format_double_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of doubles') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_validate_boolean(self, input_data, node=None, input_name=''): + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + if value not in ('true', '1', 'false', '0', ): + raise_parse_error( + node, + 'Requires sequence of booleans ' + '("true", "1", "false", "0")') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + 
input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + 
total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + return None + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + if type(self) != type(other): + return False + return self.__dict__ == other.__dict__ + def __ne__(self, other): + return not self.__eq__(other) + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. 
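The date/time helpers in the fallback GeneratedsSuper above serialize timezone-aware values with an explicit UTC offset (or a trailing 'Z' for UTC), and gds_parse_datetime reverses that. A small round-trip sketch, assuming the fallback class defined above is the one in use:

# Round-trip sketch for gds_format_datetime / gds_parse_datetime (illustrative only).
import datetime

from nfselib.paulistana.v02 import RetornoInformacoesLote_v01 as model_

tz = datetime.timezone(datetime.timedelta(hours=-3))  # e.g. Sao Paulo standard time
dt = datetime.datetime(2020, 7, 22, 6, 40, 30, tzinfo=tz)

text = model_.GeneratedsSuper().gds_format_datetime(dt)
print(text)                                            # 2020-07-22T06:40:30-03:00
assert model_.GeneratedsSuper.gds_parse_datetime(text) == dt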
+# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. +# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(' ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." + if not inStr: + return '' + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s2 = '' + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos:mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start():mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + return s1 + + +def quote_attrib(inStr): + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s1 = s1.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', """) + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find('\n') == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find('\n') == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = '' + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(':') + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get('{%s}%s' % (namespace, name, )) + return value + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + def getCategory(self): + return self.category + def getContenttype(self, content_type): + return self.content_type + def getValue(self): + return self.value + def getName(self): + return self.name + def export(self, outfile, level, name, namespace, + pretty_print=True): + if self.category == 
MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def set_data_type(self, data_type): 
self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + +# +# Data representation classes. +# + + +class RetornoInformacoesLote(GeneratedsSuper): + """Schema utilizado para RETORNO de Pedidos de Informações de Lote.Este + Schema XML é utilizado pelo Web Service para informar aos + prestadores de serviços o resultado do pedido de informações de + lote.""" + subclass = None + superclass = None + def __init__(self, Cabecalho=None, Alerta=None, Erro=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.Cabecalho = Cabecalho + if Alerta is None: + self.Alerta = [] + else: + self.Alerta = Alerta + if Erro is None: + self.Erro = [] + else: + self.Erro = Erro + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, RetornoInformacoesLote) + if subclass is not None: + return subclass(*args_, **kwargs_) + if RetornoInformacoesLote.subclass: + return RetornoInformacoesLote.subclass(*args_, **kwargs_) + else: + return RetornoInformacoesLote(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): + if ( + self.Cabecalho is not None or + self.Alerta or + self.Erro + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='RetornoInformacoesLote', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('RetornoInformacoesLote') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='RetornoInformacoesLote') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='RetornoInformacoesLote', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='RetornoInformacoesLote'): + pass + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='RetornoInformacoesLote', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Cabecalho is not None: + self.Cabecalho.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Cabecalho', pretty_print=pretty_print) + for Alerta_ in 
self.Alerta: + showIndent(outfile, level, pretty_print) + outfile.write('<%sAlerta>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(Alerta_), input_name='Alerta')), namespaceprefix_ , eol_)) + for Erro_ in self.Erro: + showIndent(outfile, level, pretty_print) + outfile.write('<%sErro>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(Erro_), input_name='Erro')), namespaceprefix_ , eol_)) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + pass + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'Cabecalho': + obj_ = CabecalhoType.factory(parent_object_=self) + obj_.build(child_) + self.Cabecalho = obj_ + obj_.original_tagname_ = 'Cabecalho' + elif nodeName_ == 'Alerta': + Alerta_ = child_.text + Alerta_ = self.gds_validate_string(Alerta_, node, 'Alerta') + self.Alerta.append(Alerta_) + elif nodeName_ == 'Erro': + Erro_ = child_.text + Erro_ = self.gds_validate_string(Erro_, node, 'Erro') + self.Erro.append(Erro_) +# end class RetornoInformacoesLote + + +class CabecalhoType(GeneratedsSuper): + """Cabeçalho do retorno.Versão do Schema XML utilizado.""" + subclass = None + superclass = None + def __init__(self, Versao='1', Sucesso=None, InformacoesLote=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.Versao = _cast(None, Versao) + self.Sucesso = Sucesso + self.InformacoesLote = InformacoesLote + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, CabecalhoType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if CabecalhoType.subclass: + return CabecalhoType.subclass(*args_, **kwargs_) + else: + return CabecalhoType(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): + if ( + self.Sucesso is not None or + self.InformacoesLote is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CabecalhoType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('CabecalhoType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CabecalhoType') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='CabecalhoType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='CabecalhoType'): + if self.Versao != "1" and 'Versao' not in already_processed: + already_processed.add('Versao') + outfile.write(' Versao=%s' % (quote_attrib(self.Versao), )) + def exportChildren(self, outfile, level, 
namespaceprefix_='', namespacedef_='', name_='CabecalhoType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Sucesso is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sSucesso>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Sucesso), input_name='Sucesso')), namespaceprefix_ , eol_)) + if self.InformacoesLote is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sInformacoesLote>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.InformacoesLote), input_name='InformacoesLote')), namespaceprefix_ , eol_)) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('Versao', node) + if value is not None and 'Versao' not in already_processed: + already_processed.add('Versao') + self.Versao = value + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'Sucesso': + Sucesso_ = child_.text + Sucesso_ = self.gds_validate_string(Sucesso_, node, 'Sucesso') + self.Sucesso = Sucesso_ + elif nodeName_ == 'InformacoesLote': + InformacoesLote_ = child_.text + InformacoesLote_ = self.gds_validate_string(InformacoesLote_, node, 'InformacoesLote') + self.InformacoesLote = InformacoesLote_ +# end class CabecalhoType + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + rootClass = GDSClassesMapping.get(tag) + if rootClass is None: + rootClass = globals().get(tag) + return tag, rootClass + + +def parse(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'RetornoInformacoesLote' + rootClass = RetornoInformacoesLote + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + doc = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='', + pretty_print=True) + return rootObj + + +def parseEtree(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'RetornoInformacoesLote' + rootClass = RetornoInformacoesLote + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + doc = None + mapping = {} + rootElement = rootObj.to_etree(None, name_=rootTag, mapping_=mapping) + reverse_mapping = rootObj.gds_reverse_node_mapping(mapping) + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(content) + sys.stdout.write('\n') + return rootObj, rootElement, mapping, reverse_mapping + + +def parseString(inString, silence=False): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. 
+ Returns -- The root object in the tree. + ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'RetornoInformacoesLote' + rootClass = RetornoInformacoesLote + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='') + return rootObj + + +def parseLiteral(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'RetornoInformacoesLote' + rootClass = RetornoInformacoesLote + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + doc = None + if not silence: + sys.stdout.write('#from RetornoInformacoesLote_v01 import *\n\n') + sys.stdout.write('import RetornoInformacoesLote_v01 as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + + +__all__ = [ + "CabecalhoType", + "RetornoInformacoesLote" +] diff --git a/nfselib/paulistana/v02/TiposNFTS_v01.py b/nfselib/paulistana/v02/TiposNFTS_v01.py new file mode 100644 index 0000000..101c56a --- /dev/null +++ b/nfselib/paulistana/v02/TiposNFTS_v01.py @@ -0,0 +1,4413 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Jul 22 06:40:30 2020 by generateDS.py version 2.30.15. +# Python 3.8.3 (default, May 17 2020, 18:15:42) [GCC 10.1.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('--no-collect-includes', '') +# ('--use-getter-setter', 'none') +# ('-f', '') +# ('-o', './paulistanalib/v02/TiposNFTS_v01.py') +# +# Command line arguments: +# ./process_includes/TiposNFTS_v01.xsd +# +# Command line: +# /home/mileo/Projects/oca12/bin/erpbrasil-edoc-gen-generate-python --no-namespace-defs --no-collect-includes --use-getter-setter="none" -f -o "./paulistanalib/v02/TiposNFTS_v01.py" ./process_includes/TiposNFTS_v01.xsd +# +# Current working directory (os.getcwd()): +# schemas +# + +import os +import sys +import re as re_ +import base64 +import datetime as datetime_ +import warnings as warnings_ +try: + from lxml import etree as etree_ +except ImportError: + from xml.etree import ElementTree as etree_ + + +Validate_simpletypes_ = True +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. 
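RetornoInformacoesLote_v01, which ends just above, follows the same pattern. Retorno documents are normally produced by the city web service rather than built by hand, but export() has the same signature on every generated class; a sketch of serializing an object tree to a string, with made-up field values:

# Illustrative only: the field values are invented; export() is the generated API.
import io

from nfselib.paulistana.v02 import RetornoInformacoesLote_v01 as model_

retorno = model_.RetornoInformacoesLote(
    Cabecalho=model_.CabecalhoType(Sucesso='false'),
    Erro=['Lote nao encontrado'],  # hypothetical error message
)
buf = io.StringIO()
retorno.export(buf, 0, name_='RetornoInformacoesLote', pretty_print=True)
print(buf.getvalue())

Because the attribute Versao defaults to '1', exportAttributes only writes it when it differs from that default, as the generated code above shows.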
+ try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. +# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ImportError: + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ImportError: + GenerateDSNamespaceTypePrefixes_ = {} + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. 
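TiposNFTS_v01 repeats the same scaffolding; one hook worth illustrating here is CurrentSubclassModule_, which every generated factory() consults through getSubclassFromModule_: if the registered module defines a class named "<Name>Sub", that subclass is instantiated instead of the generated one. A sketch with made-up names:

# Illustrative sketch of the CurrentSubclassModule_ / "<Name>Sub" extension hook.
from nfselib.paulistana.v02 import TiposNFTS_v01 as model_


class tpEventoSub(model_.tpEvento):
    """Application-level extension of the generated tpEvento class."""

    def descricao_resumida(self):
        return (self.Descricao or '')[:60]


class _Extensions(object):  # stands in for a real module holding the *Sub classes
    tpEventoSub = tpEventoSub


model_.CurrentSubclassModule_ = _Extensions
evento = model_.tpEvento.factory()      # returns a tpEventoSub instance
evento.Descricao = 'Evento de teste'
assert isinstance(evento, tpEventoSub)
print(evento.descricao_resumida())

The same hook is also honoured while parsing, since buildChildren creates nested objects through the per-class factory() methods.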
+ +try: + from generatedssuper import GeneratedsSuper +except ImportError as exp: + + class GeneratedsSuper(object): + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data) + def gds_validate_base64(self, input_data, node=None, input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % input_data + def gds_validate_integer(self, input_data, node=None, input_name=''): + return input_data + def gds_format_integer_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integers') + return values + def gds_format_float(self, input_data, input_name=''): + return ('%.15f' % input_data).rstrip('0') + def gds_validate_float(self, input_data, node=None, input_name=''): + return input_data + def gds_format_float_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of floats') + return values + def gds_format_double(self, input_data, input_name=''): + return '%e' % input_data + def gds_validate_double(self, input_data, node=None, input_name=''): + return input_data + def gds_format_double_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of doubles') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_validate_boolean(self, input_data, node=None, input_name=''): + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + if value not in ('true', '1', 'false', '0', ): + raise_parse_error( + node, + 'Requires sequence of booleans ' + '("true", "1", "false", "0")') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + 
input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' + time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + 
total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. + found1 = True + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + return None + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + if type(self) != type(other): + return False + return self.__dict__ == other.__dict__ + def __ne__(self, other): + return not self.__eq__(other) + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. 
+# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. +# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(' ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." + if not inStr: + return '' + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s2 = '' + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos:mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start():mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + return s1 + + +def quote_attrib(inStr): + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s1 = s1.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', """) + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find('\n') == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find('\n') == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = '' + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(':') + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get('{%s}%s' % (namespace, name, )) + return value + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + def getCategory(self): + return self.category + def getContenttype(self, content_type): + return self.content_type + def getValue(self): + return self.value + def getName(self): + return self.name + def export(self, outfile, level, name, namespace, + pretty_print=True): + if self.category == 
MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def set_data_type(self, data_type): 
self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + +# +# Data representation classes. +# + + +class tpOpcaoSimples(object): + _0='0' + _1='1' + _2='2' + _3='3' + + +class tpStatusNFe(object): + N='N' + C='C' + E='E' + + +class tpTipoRPS(object): + RPS='RPS' + RPSM='RPS-M' + RPSC='RPS-C' + + +class tpStatusNFTS(object): + N='N' + C='C' + + +class tpTipoDocumentoNFTS(object): + _0_1='01' + _0_2='02' + _0_3='03' + _0_5='05' + + +class tpTributacaoNFTS(object): + T='T' + I='I' + J='J' + + +class tpEvento(GeneratedsSuper): + subclass = None + superclass = None + def __init__(self, Codigo=None, Descricao=None, IdentificacaoDocumento=None, IdentificacaoNFTS=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.Codigo = Codigo + self.validate_tpCodigoEvento(self.Codigo) + self.Descricao = Descricao + self.validate_tpDescricaoEvento(self.Descricao) + self.IdentificacaoDocumento = IdentificacaoDocumento + self.IdentificacaoNFTS = IdentificacaoNFTS + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, tpEvento) + if subclass is not None: + return subclass(*args_, **kwargs_) + if tpEvento.subclass: + return tpEvento.subclass(*args_, **kwargs_) + else: + return tpEvento(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_tpCodigoEvento(self, value): + # Validate type tpCodigoEvento, a restriction on xs:int. + if value is not None and Validate_simpletypes_: + if not self.gds_validate_simple_patterns( + self.validate_tpCodigoEvento_patterns_, value): + warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tpCodigoEvento_patterns_, )) + validate_tpCodigoEvento_patterns_ = [['^[0-9]{3,4}$']] + def validate_tpDescricaoEvento(self, value): + # Validate type tpDescricaoEvento, a restriction on xs:string. 
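+        # Note: the length checks below only emit warnings via warnings_.warn
+        # (maxLength 300 per the XSD); an over-long Descricao is still accepted.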
+ if value is not None and Validate_simpletypes_: + if len(value) > 300: + warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpDescricaoEvento' % {"value" : value.encode("utf-8")} ) + if len(value) < 0: + warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tpDescricaoEvento' % {"value" : value.encode("utf-8")} ) + def hasContent_(self): + if ( + self.Codigo is not None or + self.Descricao is not None or + self.IdentificacaoDocumento is not None or + self.IdentificacaoNFTS is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='tpEvento', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpEvento') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='tpEvento') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='tpEvento', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='tpEvento'): + pass + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='tpEvento', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Codigo is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sCodigo>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.Codigo, input_name='Codigo'), namespaceprefix_ , eol_)) + if self.Descricao is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sDescricao>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Descricao), input_name='Descricao')), namespaceprefix_ , eol_)) + if self.IdentificacaoDocumento is not None: + self.IdentificacaoDocumento.export(outfile, level, namespaceprefix_, namespacedef_='', name_='IdentificacaoDocumento', pretty_print=pretty_print) + if self.IdentificacaoNFTS is not None: + self.IdentificacaoNFTS.export(outfile, level, namespaceprefix_, namespacedef_='', name_='IdentificacaoNFTS', pretty_print=pretty_print) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + pass + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'Codigo' and child_.text: + sval_ = child_.text + try: + ival_ = int(sval_) + except (TypeError, ValueError) as exp: + raise_parse_error(child_, 'requires integer: %s' % exp) + ival_ = self.gds_validate_integer(ival_, node, 'Codigo') + self.Codigo = ival_ + # validate type tpCodigoEvento + self.validate_tpCodigoEvento(self.Codigo) + elif nodeName_ == 'Descricao': + Descricao_ = child_.text + Descricao_ = self.gds_validate_string(Descricao_, node, 
'Descricao') + self.Descricao = Descricao_ + # validate type tpDescricaoEvento + self.validate_tpDescricaoEvento(self.Descricao) + elif nodeName_ == 'IdentificacaoDocumento': + obj_ = tpIdentificacaoDocumento.factory(parent_object_=self) + obj_.build(child_) + self.IdentificacaoDocumento = obj_ + obj_.original_tagname_ = 'IdentificacaoDocumento' + elif nodeName_ == 'IdentificacaoNFTS': + obj_ = tpIdentificacaoNFTS.factory(parent_object_=self) + obj_.build(child_) + self.IdentificacaoNFTS = obj_ + obj_.original_tagname_ = 'IdentificacaoNFTS' +# end class tpEvento + + +class tpInformacoesLoteNFTS(GeneratedsSuper): + subclass = None + superclass = None + def __init__(self, NumeroLote=None, Remetente=None, dtEnvioLote=None, QtdeNFTSProcessadas=None, TempoProcessamento=None, ValorTotalServicos=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.NumeroLote = NumeroLote + self.validate_tpNumeroLote(self.NumeroLote) + self.Remetente = Remetente + if isinstance(dtEnvioLote, BaseStrType_): + initvalue_ = datetime_.datetime.strptime(dtEnvioLote, '%Y-%m-%dT%H:%M:%S') + else: + initvalue_ = dtEnvioLote + self.dtEnvioLote = initvalue_ + self.QtdeNFTSProcessadas = QtdeNFTSProcessadas + self.validate_tpQuantidade(self.QtdeNFTSProcessadas) + self.TempoProcessamento = TempoProcessamento + self.validate_tpTempoProcessamento(self.TempoProcessamento) + self.ValorTotalServicos = ValorTotalServicos + self.validate_tpValor(self.ValorTotalServicos) + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, tpInformacoesLoteNFTS) + if subclass is not None: + return subclass(*args_, **kwargs_) + if tpInformacoesLoteNFTS.subclass: + return tpInformacoesLoteNFTS.subclass(*args_, **kwargs_) + else: + return tpInformacoesLoteNFTS(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_tpNumeroLote(self, value): + # Validate type tpNumeroLote, a restriction on xs:nonNegativeInteger. + if value is not None and Validate_simpletypes_: + if len(str(value)) >= 15: + warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on tpNumeroLote' % {"value" : value} ) + def validate_tpQuantidade(self, value): + # Validate type tpQuantidade, a restriction on xs:long. + if value is not None and Validate_simpletypes_: + if not self.gds_validate_simple_patterns( + self.validate_tpQuantidade_patterns_, value): + warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tpQuantidade_patterns_, )) + validate_tpQuantidade_patterns_ = [['^[0-9]{1,15}$']] + def validate_tpTempoProcessamento(self, value): + # Validate type tpTempoProcessamento, a restriction on xs:long. + if value is not None and Validate_simpletypes_: + if not self.gds_validate_simple_patterns( + self.validate_tpTempoProcessamento_patterns_, value): + warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tpTempoProcessamento_patterns_, )) + validate_tpTempoProcessamento_patterns_ = [['^[0-9]{1,15}$']] + def validate_tpValor(self, value): + # Validate type tpValor, a restriction on xs:decimal. 
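+        # tpValor is a non-negative monetary amount with at most two decimal
+        # places (see the pattern below); as with the other validators, a
+        # mismatch only produces a warning, not an exception.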
+ if value is not None and Validate_simpletypes_: + if value < 0: + warnings_.warn('Value "%(value)s" does not match xsd minInclusive restriction on tpValor' % {"value" : value} ) + if len(str(value)) >= 15: + warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on tpValor' % {"value" : value} ) + if not self.gds_validate_simple_patterns( + self.validate_tpValor_patterns_, value): + warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tpValor_patterns_, )) + validate_tpValor_patterns_ = [['^0|0\\.[0-9]{2}|[1-9]{1}[0-9]{0,12}(\\.[0-9]{0,2})?$']] + def hasContent_(self): + if ( + self.NumeroLote is not None or + self.Remetente is not None or + self.dtEnvioLote is not None or + self.QtdeNFTSProcessadas is not None or + self.TempoProcessamento is not None or + self.ValorTotalServicos is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='tpInformacoesLoteNFTS', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpInformacoesLoteNFTS') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='tpInformacoesLoteNFTS') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='tpInformacoesLoteNFTS', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='tpInformacoesLoteNFTS'): + pass + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='tpInformacoesLoteNFTS', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.NumeroLote is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sNumeroLote>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.NumeroLote, input_name='NumeroLote'), namespaceprefix_ , eol_)) + if self.Remetente is not None: + self.Remetente.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Remetente', pretty_print=pretty_print) + if self.dtEnvioLote is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sdtEnvioLote>%s%s' % (namespaceprefix_ , self.gds_format_datetime(self.dtEnvioLote, input_name='dtEnvioLote'), namespaceprefix_ , eol_)) + if self.QtdeNFTSProcessadas is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sQtdeNFTSProcessadas>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.QtdeNFTSProcessadas, input_name='QtdeNFTSProcessadas'), namespaceprefix_ , eol_)) + if self.TempoProcessamento is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sTempoProcessamento>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.TempoProcessamento, input_name='TempoProcessamento'), namespaceprefix_ , eol_)) + if self.ValorTotalServicos is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sValorTotalServicos>%s%s' % 
(namespaceprefix_ , self.gds_format_float(self.ValorTotalServicos, input_name='ValorTotalServicos'), namespaceprefix_ , eol_)) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + pass + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'NumeroLote' and child_.text: + sval_ = child_.text + try: + ival_ = int(sval_) + except (TypeError, ValueError) as exp: + raise_parse_error(child_, 'requires integer: %s' % exp) + if ival_ < 0: + raise_parse_error(child_, 'requires nonNegativeInteger') + ival_ = self.gds_validate_integer(ival_, node, 'NumeroLote') + self.NumeroLote = ival_ + # validate type tpNumeroLote + self.validate_tpNumeroLote(self.NumeroLote) + elif nodeName_ == 'Remetente': + obj_ = tpRemetente.factory(parent_object_=self) + obj_.build(child_) + self.Remetente = obj_ + obj_.original_tagname_ = 'Remetente' + elif nodeName_ == 'dtEnvioLote': + sval_ = child_.text + dval_ = self.gds_parse_datetime(sval_) + self.dtEnvioLote = dval_ + elif nodeName_ == 'QtdeNFTSProcessadas' and child_.text: + sval_ = child_.text + try: + ival_ = int(sval_) + except (TypeError, ValueError) as exp: + raise_parse_error(child_, 'requires integer: %s' % exp) + ival_ = self.gds_validate_integer(ival_, node, 'QtdeNFTSProcessadas') + self.QtdeNFTSProcessadas = ival_ + # validate type tpQuantidade + self.validate_tpQuantidade(self.QtdeNFTSProcessadas) + elif nodeName_ == 'TempoProcessamento' and child_.text: + sval_ = child_.text + try: + ival_ = int(sval_) + except (TypeError, ValueError) as exp: + raise_parse_error(child_, 'requires integer: %s' % exp) + ival_ = self.gds_validate_integer(ival_, node, 'TempoProcessamento') + self.TempoProcessamento = ival_ + # validate type tpTempoProcessamento + self.validate_tpTempoProcessamento(self.TempoProcessamento) + elif nodeName_ == 'ValorTotalServicos' and child_.text: + sval_ = child_.text + try: + fval_ = float(sval_) + except (TypeError, ValueError) as exp: + raise_parse_error(child_, 'requires float or double: %s' % exp) + fval_ = self.gds_validate_float(fval_, node, 'ValorTotalServicos') + self.ValorTotalServicos = fval_ + # validate type tpValor + self.validate_tpValor(self.ValorTotalServicos) +# end class tpInformacoesLoteNFTS + + +class tpCabecalho(GeneratedsSuper): + """Cabeçalho do retorno.Informe a Versão do Schema XML utilizado.""" + subclass = None + superclass = None + def __init__(self, Versao=None, Sucesso=None, InformacoesLoteNFTS=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.Versao = _cast(int, Versao) + self.Sucesso = Sucesso + self.validate_tpSucesso(self.Sucesso) + self.InformacoesLoteNFTS = InformacoesLoteNFTS + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, tpCabecalho) + if subclass is not None: + return subclass(*args_, **kwargs_) + if tpCabecalho.subclass: + return tpCabecalho.subclass(*args_, **kwargs_) + else: + return tpCabecalho(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_tpSucesso(self, value): + # Validate type tpSucesso, a restriction on xs:boolean. 
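+        # tpSucesso is a plain xs:boolean, so there is nothing extra to check
+        # here; buildChildren already maps 'true'/'1' and 'false'/'0' to Python
+        # booleans and raises a parse error for anything else.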
+ if value is not None and Validate_simpletypes_: + pass + def hasContent_(self): + if ( + self.Sucesso is not None or + self.InformacoesLoteNFTS is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='tpCabecalho', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpCabecalho') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='tpCabecalho') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='tpCabecalho', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='tpCabecalho'): + if self.Versao is not None and 'Versao' not in already_processed: + already_processed.add('Versao') + outfile.write(' Versao=%s' % (quote_attrib(self.Versao), )) + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='tpCabecalho', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Sucesso is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sSucesso>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.Sucesso, input_name='Sucesso'), namespaceprefix_ , eol_)) + if self.InformacoesLoteNFTS is not None: + self.InformacoesLoteNFTS.export(outfile, level, namespaceprefix_, namespacedef_='', name_='InformacoesLoteNFTS', pretty_print=pretty_print) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('Versao', node) + if value is not None and 'Versao' not in already_processed: + already_processed.add('Versao') + self.Versao = value + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'Sucesso': + sval_ = child_.text + if sval_ in ('true', '1'): + ival_ = True + elif sval_ in ('false', '0'): + ival_ = False + else: + raise_parse_error(child_, 'requires boolean') + ival_ = self.gds_validate_boolean(ival_, node, 'Sucesso') + self.Sucesso = ival_ + # validate type tpSucesso + self.validate_tpSucesso(self.Sucesso) + elif nodeName_ == 'InformacoesLoteNFTS': + obj_ = tpInformacoesLoteNFTS.factory(parent_object_=self) + obj_.build(child_) + self.InformacoesLoteNFTS = obj_ + obj_.original_tagname_ = 'InformacoesLoteNFTS' +# end class tpCabecalho + + +class tpCabecalhoRetorno(GeneratedsSuper): + """Cabeçalho do retorno para consultas.Informe a Versão do Schema XML + utilizado.""" + subclass = None + superclass = None + def __init__(self, Versao='1', Sucesso=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.Versao = _cast(int, Versao) + self.Sucesso = Sucesso + 
self.validate_tpSucesso(self.Sucesso) + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, tpCabecalhoRetorno) + if subclass is not None: + return subclass(*args_, **kwargs_) + if tpCabecalhoRetorno.subclass: + return tpCabecalhoRetorno.subclass(*args_, **kwargs_) + else: + return tpCabecalhoRetorno(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_tpSucesso(self, value): + # Validate type tpSucesso, a restriction on xs:boolean. + if value is not None and Validate_simpletypes_: + pass + def hasContent_(self): + if ( + self.Sucesso is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='tpCabecalhoRetorno', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpCabecalhoRetorno') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='tpCabecalhoRetorno') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='tpCabecalhoRetorno', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='tpCabecalhoRetorno'): + if self.Versao != 1 and 'Versao' not in already_processed: + already_processed.add('Versao') + outfile.write(' Versao=%s' % (quote_attrib(self.Versao), )) + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='tpCabecalhoRetorno', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Sucesso is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sSucesso>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.Sucesso, input_name='Sucesso'), namespaceprefix_ , eol_)) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('Versao', node) + if value is not None and 'Versao' not in already_processed: + already_processed.add('Versao') + self.Versao = value + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'Sucesso': + sval_ = child_.text + if sval_ in ('true', '1'): + ival_ = True + elif sval_ in ('false', '0'): + ival_ = False + else: + raise_parse_error(child_, 'requires boolean') + ival_ = self.gds_validate_boolean(ival_, node, 'Sucesso') + self.Sucesso = ival_ + # validate type tpSucesso + self.validate_tpSucesso(self.Sucesso) +# end class tpCabecalhoRetorno + + +class tpListaRetornoLote(GeneratedsSuper): + """Lista de mensagens de retorno do lote""" + subclass = None + superclass = None + def __init__(self, Alerta=None, Erro=None, **kwargs_): + self.original_tagname_ = None + 
self.parent_object_ = kwargs_.get('parent_object_') + if Alerta is None: + self.Alerta = [] + else: + self.Alerta = Alerta + if Erro is None: + self.Erro = [] + else: + self.Erro = Erro + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, tpListaRetornoLote) + if subclass is not None: + return subclass(*args_, **kwargs_) + if tpListaRetornoLote.subclass: + return tpListaRetornoLote.subclass(*args_, **kwargs_) + else: + return tpListaRetornoLote(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): + if ( + self.Alerta or + self.Erro + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='tpListaRetornoLote', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpListaRetornoLote') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='tpListaRetornoLote') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='tpListaRetornoLote', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='tpListaRetornoLote'): + pass + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='tpListaRetornoLote', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + for Alerta_ in self.Alerta: + Alerta_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Alerta', pretty_print=pretty_print) + for Erro_ in self.Erro: + Erro_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Erro', pretty_print=pretty_print) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + pass + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'Alerta': + obj_ = tpEvento.factory(parent_object_=self) + obj_.build(child_) + self.Alerta.append(obj_) + obj_.original_tagname_ = 'Alerta' + elif nodeName_ == 'Erro': + obj_ = tpEvento.factory(parent_object_=self) + obj_.build(child_) + self.Erro.append(obj_) + obj_.original_tagname_ = 'Erro' +# end class tpListaRetornoLote + + +class tpListaRetornoNFTS(GeneratedsSuper): + """Lista de mensagens de retorno da NFTS""" + subclass = None + superclass = None + def __init__(self, Alerta=None, Erro=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + if Alerta is None: + self.Alerta = [] + else: + self.Alerta = Alerta + if Erro is None: + self.Erro = [] + else: + self.Erro = Erro + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + 
CurrentSubclassModule_, tpListaRetornoNFTS) + if subclass is not None: + return subclass(*args_, **kwargs_) + if tpListaRetornoNFTS.subclass: + return tpListaRetornoNFTS.subclass(*args_, **kwargs_) + else: + return tpListaRetornoNFTS(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): + if ( + self.Alerta or + self.Erro + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='tpListaRetornoNFTS', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpListaRetornoNFTS') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='tpListaRetornoNFTS') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='tpListaRetornoNFTS', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='tpListaRetornoNFTS'): + pass + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='tpListaRetornoNFTS', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + for Alerta_ in self.Alerta: + Alerta_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Alerta', pretty_print=pretty_print) + for Erro_ in self.Erro: + Erro_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Erro', pretty_print=pretty_print) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + pass + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'Alerta': + obj_ = tpEvento.factory(parent_object_=self) + obj_.build(child_) + self.Alerta.append(obj_) + obj_.original_tagname_ = 'Alerta' + elif nodeName_ == 'Erro': + obj_ = tpEvento.factory(parent_object_=self) + obj_.build(child_) + self.Erro.append(obj_) + obj_.original_tagname_ = 'Erro' +# end class tpListaRetornoNFTS + + +class tpListaRetornoConsultaNFTS(GeneratedsSuper): + """Lista de mensagens de retorno do lote""" + subclass = None + superclass = None + def __init__(self, ListaRetornoLote=None, NFTS=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + if ListaRetornoLote is None: + self.ListaRetornoLote = [] + else: + self.ListaRetornoLote = ListaRetornoLote + if NFTS is None: + self.NFTS = [] + else: + self.NFTS = NFTS + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, tpListaRetornoConsultaNFTS) + if subclass is not None: + return subclass(*args_, **kwargs_) + if tpListaRetornoConsultaNFTS.subclass: + return tpListaRetornoConsultaNFTS.subclass(*args_, **kwargs_) + else: + return 
tpListaRetornoConsultaNFTS(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): + if ( + self.ListaRetornoLote or + self.NFTS + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='tpListaRetornoConsultaNFTS', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpListaRetornoConsultaNFTS') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='tpListaRetornoConsultaNFTS') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='tpListaRetornoConsultaNFTS', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='tpListaRetornoConsultaNFTS'): + pass + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='tpListaRetornoConsultaNFTS', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + for ListaRetornoLote_ in self.ListaRetornoLote: + ListaRetornoLote_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ListaRetornoLote', pretty_print=pretty_print) + for NFTS_ in self.NFTS: + NFTS_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='NFTS', pretty_print=pretty_print) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + pass + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'ListaRetornoLote': + obj_ = tpListaRetornoLote.factory(parent_object_=self) + obj_.build(child_) + self.ListaRetornoLote.append(obj_) + obj_.original_tagname_ = 'ListaRetornoLote' + elif nodeName_ == 'NFTS': + obj_ = tpNFTSRetorno.factory(parent_object_=self) + obj_.build(child_) + self.NFTS.append(obj_) + obj_.original_tagname_ = 'NFTS' +# end class tpListaRetornoConsultaNFTS + + +class tpIdentificacaoDocumento(GeneratedsSuper): + """Identificação da NFTS.""" + subclass = None + superclass = None + def __init__(self, Posicao=None, ChaveDocumento=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.Posicao = Posicao + self.validate_PosicaoType(self.Posicao) + self.ChaveDocumento = ChaveDocumento + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, tpIdentificacaoDocumento) + if subclass is not None: + return subclass(*args_, **kwargs_) + if tpIdentificacaoDocumento.subclass: + return tpIdentificacaoDocumento.subclass(*args_, **kwargs_) + else: + return tpIdentificacaoDocumento(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_PosicaoType(self, value): + # Validate type PosicaoType, a 
restriction on xs:int. + if value is not None and Validate_simpletypes_: + if not self.gds_validate_simple_patterns( + self.validate_PosicaoType_patterns_, value): + warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_PosicaoType_patterns_, )) + validate_PosicaoType_patterns_ = [['^[0-9]{1,2}$']] + def hasContent_(self): + if ( + self.Posicao is not None or + self.ChaveDocumento is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='tpIdentificacaoDocumento', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpIdentificacaoDocumento') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='tpIdentificacaoDocumento') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='tpIdentificacaoDocumento', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='tpIdentificacaoDocumento'): + pass + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='tpIdentificacaoDocumento', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Posicao is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sPosicao>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.Posicao, input_name='Posicao'), namespaceprefix_ , eol_)) + if self.ChaveDocumento is not None: + self.ChaveDocumento.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ChaveDocumento', pretty_print=pretty_print) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + pass + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'Posicao' and child_.text: + sval_ = child_.text + try: + ival_ = int(sval_) + except (TypeError, ValueError) as exp: + raise_parse_error(child_, 'requires integer: %s' % exp) + ival_ = self.gds_validate_integer(ival_, node, 'Posicao') + self.Posicao = ival_ + # validate type PosicaoType + self.validate_PosicaoType(self.Posicao) + elif nodeName_ == 'ChaveDocumento': + obj_ = tpChaveDocumento.factory(parent_object_=self) + obj_.build(child_) + self.ChaveDocumento = obj_ + obj_.original_tagname_ = 'ChaveDocumento' +# end class tpIdentificacaoDocumento + + +class tpIdentificacaoNFTS(GeneratedsSuper): + """Identificação da NFTS.""" + subclass = None + superclass = None + def __init__(self, Posicao=None, ChaveNFTS=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.Posicao = Posicao + self.validate_PosicaoType1(self.Posicao) + 
self.ChaveNFTS = ChaveNFTS + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, tpIdentificacaoNFTS) + if subclass is not None: + return subclass(*args_, **kwargs_) + if tpIdentificacaoNFTS.subclass: + return tpIdentificacaoNFTS.subclass(*args_, **kwargs_) + else: + return tpIdentificacaoNFTS(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_PosicaoType1(self, value): + # Validate type PosicaoType1, a restriction on xs:int. + if value is not None and Validate_simpletypes_: + if not self.gds_validate_simple_patterns( + self.validate_PosicaoType1_patterns_, value): + warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_PosicaoType1_patterns_, )) + validate_PosicaoType1_patterns_ = [['^[0-9]{1,2}$']] + def hasContent_(self): + if ( + self.Posicao is not None or + self.ChaveNFTS is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='tpIdentificacaoNFTS', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpIdentificacaoNFTS') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='tpIdentificacaoNFTS') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='tpIdentificacaoNFTS', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='tpIdentificacaoNFTS'): + pass + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='tpIdentificacaoNFTS', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Posicao is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sPosicao>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.Posicao, input_name='Posicao'), namespaceprefix_ , eol_)) + if self.ChaveNFTS is not None: + self.ChaveNFTS.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ChaveNFTS', pretty_print=pretty_print) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + pass + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'Posicao' and child_.text: + sval_ = child_.text + try: + ival_ = int(sval_) + except (TypeError, ValueError) as exp: + raise_parse_error(child_, 'requires integer: %s' % exp) + ival_ = self.gds_validate_integer(ival_, node, 'Posicao') + self.Posicao = ival_ + # validate type PosicaoType1 + self.validate_PosicaoType1(self.Posicao) + elif nodeName_ == 'ChaveNFTS': + obj_ = tpChaveNFTS.factory(parent_object_=self) + obj_.build(child_) + 
self.ChaveNFTS = obj_ + obj_.original_tagname_ = 'ChaveNFTS' +# end class tpIdentificacaoNFTS + + +class tpCPFCNPJ(GeneratedsSuper): + """Tipo que representa um CPF/CNPJ.""" + subclass = None + superclass = None + def __init__(self, CPF=None, CNPJ=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.CPF = CPF + self.validate_tpCPF(self.CPF) + self.CNPJ = CNPJ + self.validate_tpCNPJ(self.CNPJ) + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, tpCPFCNPJ) + if subclass is not None: + return subclass(*args_, **kwargs_) + if tpCPFCNPJ.subclass: + return tpCPFCNPJ.subclass(*args_, **kwargs_) + else: + return tpCPFCNPJ(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_tpCPF(self, value): + # Validate type tpCPF, a restriction on xs:string. + if value is not None and Validate_simpletypes_: + if not self.gds_validate_simple_patterns( + self.validate_tpCPF_patterns_, value): + warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tpCPF_patterns_, )) + validate_tpCPF_patterns_ = [['^[0-9]{11}$']] + def validate_tpCNPJ(self, value): + # Validate type tpCNPJ, a restriction on xs:string. + if value is not None and Validate_simpletypes_: + if not self.gds_validate_simple_patterns( + self.validate_tpCNPJ_patterns_, value): + warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tpCNPJ_patterns_, )) + validate_tpCNPJ_patterns_ = [['^[0-9]{14}$']] + def hasContent_(self): + if ( + self.CPF is not None or + self.CNPJ is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='tpCPFCNPJ', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpCPFCNPJ') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='tpCPFCNPJ') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='tpCPFCNPJ', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='tpCPFCNPJ'): + pass + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='tpCPFCNPJ', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.CPF is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sCPF>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CPF), input_name='CPF')), namespaceprefix_ , eol_)) + if self.CNPJ is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sCNPJ>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CNPJ), input_name='CNPJ')), namespaceprefix_ , eol_)) + def build(self, node): + already_processed = set() + 
self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + pass + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'CPF': + CPF_ = child_.text + CPF_ = self.gds_validate_string(CPF_, node, 'CPF') + self.CPF = CPF_ + # validate type tpCPF + self.validate_tpCPF(self.CPF) + elif nodeName_ == 'CNPJ': + CNPJ_ = child_.text + CNPJ_ = self.gds_validate_string(CNPJ_, node, 'CNPJ') + self.CNPJ = CNPJ_ + # validate type tpCNPJ + self.validate_tpCNPJ(self.CNPJ) +# end class tpCPFCNPJ + + +class tpRemetente(GeneratedsSuper): + """Dados do Remetende CPF/CNPJ""" + subclass = None + superclass = None + def __init__(self, CPFCNPJ=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.CPFCNPJ = CPFCNPJ + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, tpRemetente) + if subclass is not None: + return subclass(*args_, **kwargs_) + if tpRemetente.subclass: + return tpRemetente.subclass(*args_, **kwargs_) + else: + return tpRemetente(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): + if ( + self.CPFCNPJ is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='tpRemetente', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpRemetente') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='tpRemetente') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='tpRemetente', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='tpRemetente'): + pass + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='tpRemetente', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.CPFCNPJ is not None: + self.CPFCNPJ.export(outfile, level, namespaceprefix_, namespacedef_='', name_='CPFCNPJ', pretty_print=pretty_print) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + pass + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'CPFCNPJ': + obj_ = tpCPFCNPJ.factory(parent_object_=self) + obj_.build(child_) + self.CPFCNPJ = obj_ + obj_.original_tagname_ = 'CPFCNPJ' +# end class tpRemetente + + +class tpRemetente_ComCCM(GeneratedsSuper): + """Dados do Remetende CPF/CNPJ e 
Inscricao Municipal""" + subclass = None + superclass = None + def __init__(self, CPFCNPJ=None, InscricaoMunicipal=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.CPFCNPJ = CPFCNPJ + self.InscricaoMunicipal = InscricaoMunicipal + self.validate_tpInscricaoMunicipal(self.InscricaoMunicipal) + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, tpRemetente_ComCCM) + if subclass is not None: + return subclass(*args_, **kwargs_) + if tpRemetente_ComCCM.subclass: + return tpRemetente_ComCCM.subclass(*args_, **kwargs_) + else: + return tpRemetente_ComCCM(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_tpInscricaoMunicipal(self, value): + # Validate type tpInscricaoMunicipal, a restriction on xs:long. + if value is not None and Validate_simpletypes_: + if not self.gds_validate_simple_patterns( + self.validate_tpInscricaoMunicipal_patterns_, value): + warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tpInscricaoMunicipal_patterns_, )) + validate_tpInscricaoMunicipal_patterns_ = [['^[0-9]{8,8}$']] + def hasContent_(self): + if ( + self.CPFCNPJ is not None or + self.InscricaoMunicipal is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='tpRemetente_ComCCM', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpRemetente_ComCCM') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='tpRemetente_ComCCM') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='tpRemetente_ComCCM', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='tpRemetente_ComCCM'): + pass + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='tpRemetente_ComCCM', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.CPFCNPJ is not None: + self.CPFCNPJ.export(outfile, level, namespaceprefix_, namespacedef_='', name_='CPFCNPJ', pretty_print=pretty_print) + if self.InscricaoMunicipal is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sInscricaoMunicipal>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.InscricaoMunicipal, input_name='InscricaoMunicipal'), namespaceprefix_ , eol_)) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + pass + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'CPFCNPJ': + obj_ = 
tpCPFCNPJ.factory(parent_object_=self) + obj_.build(child_) + self.CPFCNPJ = obj_ + obj_.original_tagname_ = 'CPFCNPJ' + elif nodeName_ == 'InscricaoMunicipal' and child_.text: + sval_ = child_.text + try: + ival_ = int(sval_) + except (TypeError, ValueError) as exp: + raise_parse_error(child_, 'requires integer: %s' % exp) + ival_ = self.gds_validate_integer(ival_, node, 'InscricaoMunicipal') + self.InscricaoMunicipal = ival_ + # validate type tpInscricaoMunicipal + self.validate_tpInscricaoMunicipal(self.InscricaoMunicipal) +# end class tpRemetente_ComCCM + + +class tpChaveDocumento(GeneratedsSuper): + """Chave de identificação da NFTS.""" + subclass = None + superclass = None + def __init__(self, InscricaoMunicipal=None, SerieNFTS=None, NumeroDocumento=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.InscricaoMunicipal = InscricaoMunicipal + self.validate_tpInscricaoMunicipal(self.InscricaoMunicipal) + self.SerieNFTS = SerieNFTS + self.validate_tpSerieNFTS(self.SerieNFTS) + self.NumeroDocumento = NumeroDocumento + self.validate_tpNumeroDocumento(self.NumeroDocumento) + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, tpChaveDocumento) + if subclass is not None: + return subclass(*args_, **kwargs_) + if tpChaveDocumento.subclass: + return tpChaveDocumento.subclass(*args_, **kwargs_) + else: + return tpChaveDocumento(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_tpInscricaoMunicipal(self, value): + # Validate type tpInscricaoMunicipal, a restriction on xs:long. + if value is not None and Validate_simpletypes_: + if not self.gds_validate_simple_patterns( + self.validate_tpInscricaoMunicipal_patterns_, value): + warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tpInscricaoMunicipal_patterns_, )) + validate_tpInscricaoMunicipal_patterns_ = [['^[0-9]{8,8}$']] + def validate_tpSerieNFTS(self, value): + # Validate type tpSerieNFTS, a restriction on xs:string. + if value is not None and Validate_simpletypes_: + if len(value) > 5: + warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpSerieNFTS' % {"value" : value.encode("utf-8")} ) + if len(value) < 1: + warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tpSerieNFTS' % {"value" : value.encode("utf-8")} ) + def validate_tpNumeroDocumento(self, value): + # Validate type tpNumeroDocumento, a restriction on xs:nonNegativeInteger. 
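+        # NumeroDocumento is a nonNegativeInteger; the digit-count check below
+        # approximates the XSD maxInclusive bound and, like the other
+        # validators, only warns on violation.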
+ if value is not None and Validate_simpletypes_: + if len(str(value)) >= 12: + warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on tpNumeroDocumento' % {"value" : value} ) + def hasContent_(self): + if ( + self.InscricaoMunicipal is not None or + self.SerieNFTS is not None or + self.NumeroDocumento is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='tpChaveDocumento', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpChaveDocumento') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='tpChaveDocumento') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='tpChaveDocumento', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='tpChaveDocumento'): + pass + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='tpChaveDocumento', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.InscricaoMunicipal is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sInscricaoMunicipal>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.InscricaoMunicipal, input_name='InscricaoMunicipal'), namespaceprefix_ , eol_)) + if self.SerieNFTS is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sSerieNFTS>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SerieNFTS), input_name='SerieNFTS')), namespaceprefix_ , eol_)) + if self.NumeroDocumento is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sNumeroDocumento>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.NumeroDocumento, input_name='NumeroDocumento'), namespaceprefix_ , eol_)) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + pass + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'InscricaoMunicipal' and child_.text: + sval_ = child_.text + try: + ival_ = int(sval_) + except (TypeError, ValueError) as exp: + raise_parse_error(child_, 'requires integer: %s' % exp) + ival_ = self.gds_validate_integer(ival_, node, 'InscricaoMunicipal') + self.InscricaoMunicipal = ival_ + # validate type tpInscricaoMunicipal + self.validate_tpInscricaoMunicipal(self.InscricaoMunicipal) + elif nodeName_ == 'SerieNFTS': + SerieNFTS_ = child_.text + SerieNFTS_ = self.gds_validate_string(SerieNFTS_, node, 'SerieNFTS') + self.SerieNFTS = SerieNFTS_ + # validate type tpSerieNFTS + self.validate_tpSerieNFTS(self.SerieNFTS) + elif nodeName_ == 'NumeroDocumento' and child_.text: + 
sval_ = child_.text + try: + ival_ = int(sval_) + except (TypeError, ValueError) as exp: + raise_parse_error(child_, 'requires integer: %s' % exp) + if ival_ < 0: + raise_parse_error(child_, 'requires nonNegativeInteger') + ival_ = self.gds_validate_integer(ival_, node, 'NumeroDocumento') + self.NumeroDocumento = ival_ + # validate type tpNumeroDocumento + self.validate_tpNumeroDocumento(self.NumeroDocumento) +# end class tpChaveDocumento + + +class tpChaveNFTS(GeneratedsSuper): + """Chave de identificação da NFTS.""" + subclass = None + superclass = None + def __init__(self, InscricaoMunicipal=None, NumeroNFTS=None, CodigoVerificacao=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.InscricaoMunicipal = InscricaoMunicipal + self.validate_tpInscricaoMunicipal(self.InscricaoMunicipal) + self.NumeroNFTS = NumeroNFTS + self.validate_tpNumeroNFTS(self.NumeroNFTS) + self.CodigoVerificacao = CodigoVerificacao + self.validate_tpCodigoVerificacao(self.CodigoVerificacao) + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, tpChaveNFTS) + if subclass is not None: + return subclass(*args_, **kwargs_) + if tpChaveNFTS.subclass: + return tpChaveNFTS.subclass(*args_, **kwargs_) + else: + return tpChaveNFTS(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_tpInscricaoMunicipal(self, value): + # Validate type tpInscricaoMunicipal, a restriction on xs:long. + if value is not None and Validate_simpletypes_: + if not self.gds_validate_simple_patterns( + self.validate_tpInscricaoMunicipal_patterns_, value): + warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tpInscricaoMunicipal_patterns_, )) + validate_tpInscricaoMunicipal_patterns_ = [['^[0-9]{8,8}$']] + def validate_tpNumeroNFTS(self, value): + # Validate type tpNumeroNFTS, a restriction on xs:nonNegativeInteger. + if value is not None and Validate_simpletypes_: + if len(str(value)) >= 12: + warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on tpNumeroNFTS' % {"value" : value} ) + def validate_tpCodigoVerificacao(self, value): + # Validate type tpCodigoVerificacao, a restriction on xs:string. 
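For reference, a minimal usage sketch for the tpChaveDocumento type above. The import path, the sample values and the use of sys.stdout are illustrative assumptions; only the constructor keywords and the export() signature defined in that class are relied on (Python 3 assumed).

    import sys
    import nfselib.paulistana.v02.TiposNFTS_v01 as tipos  # assumed module path

    # The simple-type validators in this module only run when this module-level
    # flag is true; it is switched off here so the sketch stays focused on structure.
    tipos.Validate_simpletypes_ = False

    chave_doc = tipos.tpChaveDocumento(
        InscricaoMunicipal=12345678,  # tpInscricaoMunicipal: 8 digits (example value)
        SerieNFTS='00001',            # series of the source document (example value)
        NumeroDocumento=42,           # tpNumeroDocumento: non-negative integer
    )
    # export() writes the XML element to any file-like object.
    chave_doc.export(sys.stdout, level=0, name_='ChaveDocumento')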
+ if value is not None and Validate_simpletypes_: + if len(value) > 8: + warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpCodigoVerificacao' % {"value" : value.encode("utf-8")} ) + if len(value) < 8: + warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tpCodigoVerificacao' % {"value" : value.encode("utf-8")} ) + def hasContent_(self): + if ( + self.InscricaoMunicipal is not None or + self.NumeroNFTS is not None or + self.CodigoVerificacao is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='tpChaveNFTS', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpChaveNFTS') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='tpChaveNFTS') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='tpChaveNFTS', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='tpChaveNFTS'): + pass + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='tpChaveNFTS', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.InscricaoMunicipal is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sInscricaoMunicipal>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.InscricaoMunicipal, input_name='InscricaoMunicipal'), namespaceprefix_ , eol_)) + if self.NumeroNFTS is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sNumeroNFTS>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.NumeroNFTS, input_name='NumeroNFTS'), namespaceprefix_ , eol_)) + if self.CodigoVerificacao is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sCodigoVerificacao>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CodigoVerificacao), input_name='CodigoVerificacao')), namespaceprefix_ , eol_)) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + pass + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'InscricaoMunicipal' and child_.text: + sval_ = child_.text + try: + ival_ = int(sval_) + except (TypeError, ValueError) as exp: + raise_parse_error(child_, 'requires integer: %s' % exp) + ival_ = self.gds_validate_integer(ival_, node, 'InscricaoMunicipal') + self.InscricaoMunicipal = ival_ + # validate type tpInscricaoMunicipal + self.validate_tpInscricaoMunicipal(self.InscricaoMunicipal) + elif nodeName_ == 'NumeroNFTS' and child_.text: + sval_ = child_.text + try: + ival_ = int(sval_) + except (TypeError, ValueError) as 
exp: + raise_parse_error(child_, 'requires integer: %s' % exp) + if ival_ < 0: + raise_parse_error(child_, 'requires nonNegativeInteger') + ival_ = self.gds_validate_integer(ival_, node, 'NumeroNFTS') + self.NumeroNFTS = ival_ + # validate type tpNumeroNFTS + self.validate_tpNumeroNFTS(self.NumeroNFTS) + elif nodeName_ == 'CodigoVerificacao': + CodigoVerificacao_ = child_.text + CodigoVerificacao_ = self.gds_validate_string(CodigoVerificacao_, node, 'CodigoVerificacao') + self.CodigoVerificacao = CodigoVerificacao_ + # validate type tpCodigoVerificacao + self.validate_tpCodigoVerificacao(self.CodigoVerificacao) +# end class tpChaveNFTS + + +class tpEndereco(GeneratedsSuper): + """Tipo Endereço.""" + subclass = None + superclass = None + def __init__(self, TipoLogradouro=None, Logradouro=None, NumeroEndereco=None, ComplementoEndereco=None, Bairro=None, Cidade=None, UF=None, CEP=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.TipoLogradouro = TipoLogradouro + self.validate_tpTipoLogradouro(self.TipoLogradouro) + self.Logradouro = Logradouro + self.validate_tpLogradouro(self.Logradouro) + self.NumeroEndereco = NumeroEndereco + self.validate_tpNumeroEndereco(self.NumeroEndereco) + self.ComplementoEndereco = ComplementoEndereco + self.validate_tpComplementoEndereco(self.ComplementoEndereco) + self.Bairro = Bairro + self.validate_tpBairro(self.Bairro) + self.Cidade = Cidade + self.validate_tpCidadeTexto(self.Cidade) + self.UF = UF + self.validate_tpUF(self.UF) + self.CEP = CEP + self.validate_tpCEP(self.CEP) + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, tpEndereco) + if subclass is not None: + return subclass(*args_, **kwargs_) + if tpEndereco.subclass: + return tpEndereco.subclass(*args_, **kwargs_) + else: + return tpEndereco(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_tpTipoLogradouro(self, value): + # Validate type tpTipoLogradouro, a restriction on xs:string. + if value is not None and Validate_simpletypes_: + if len(value) > 3: + warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpTipoLogradouro' % {"value" : value.encode("utf-8")} ) + if len(value) < 0: + warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tpTipoLogradouro' % {"value" : value.encode("utf-8")} ) + def validate_tpLogradouro(self, value): + # Validate type tpLogradouro, a restriction on xs:string. + if value is not None and Validate_simpletypes_: + if len(value) > 50: + warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpLogradouro' % {"value" : value.encode("utf-8")} ) + if len(value) < 0: + warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tpLogradouro' % {"value" : value.encode("utf-8")} ) + def validate_tpNumeroEndereco(self, value): + # Validate type tpNumeroEndereco, a restriction on xs:string. + if value is not None and Validate_simpletypes_: + if len(value) > 10: + warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpNumeroEndereco' % {"value" : value.encode("utf-8")} ) + if len(value) < 0: + warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tpNumeroEndereco' % {"value" : value.encode("utf-8")} ) + def validate_tpComplementoEndereco(self, value): + # Validate type tpComplementoEndereco, a restriction on xs:string. 
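A quick sketch of the tpChaveNFTS key declared above, with made-up values and an assumed import path. Note that validate_tpCodigoVerificacao expects the verification code to be exactly 8 characters and reports anything else as a warning rather than an error.

    import nfselib.paulistana.v02.TiposNFTS_v01 as tipos  # assumed module path

    chave_nfts = tipos.tpChaveNFTS(
        NumeroNFTS=1001,
        CodigoVerificacao='A1B2C3D4',  # exactly 8 characters, as the min/maxLength checks require
    )
    print(chave_nfts.hasContent_())    # True once any of the three child elements is set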
+ if value is not None and Validate_simpletypes_: + if len(value) > 30: + warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpComplementoEndereco' % {"value" : value.encode("utf-8")} ) + if len(value) < 0: + warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tpComplementoEndereco' % {"value" : value.encode("utf-8")} ) + def validate_tpBairro(self, value): + # Validate type tpBairro, a restriction on xs:string. + if value is not None and Validate_simpletypes_: + if len(value) > 30: + warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpBairro' % {"value" : value.encode("utf-8")} ) + if len(value) < 0: + warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tpBairro' % {"value" : value.encode("utf-8")} ) + def validate_tpCidadeTexto(self, value): + # Validate type tpCidadeTexto, a restriction on xs:string. + if value is not None and Validate_simpletypes_: + if len(value) > 50: + warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpCidadeTexto' % {"value" : value.encode("utf-8")} ) + if len(value) < 3: + warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tpCidadeTexto' % {"value" : value.encode("utf-8")} ) + def validate_tpUF(self, value): + # Validate type tpUF, a restriction on xs:string. + if value is not None and Validate_simpletypes_: + if len(value) > 2: + warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpUF' % {"value" : value.encode("utf-8")} ) + if len(value) < 2: + warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tpUF' % {"value" : value.encode("utf-8")} ) + def validate_tpCEP(self, value): + # Validate type tpCEP, a restriction on xs:int. 
+ if value is not None and Validate_simpletypes_: + if not self.gds_validate_simple_patterns( + self.validate_tpCEP_patterns_, value): + warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tpCEP_patterns_, )) + validate_tpCEP_patterns_ = [['^[0-9]{7,8}$']] + def hasContent_(self): + if ( + self.TipoLogradouro is not None or + self.Logradouro is not None or + self.NumeroEndereco is not None or + self.ComplementoEndereco is not None or + self.Bairro is not None or + self.Cidade is not None or + self.UF is not None or + self.CEP is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='tpEndereco', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpEndereco') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='tpEndereco') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='tpEndereco', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='tpEndereco'): + pass + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='tpEndereco', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.TipoLogradouro is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sTipoLogradouro>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.TipoLogradouro), input_name='TipoLogradouro')), namespaceprefix_ , eol_)) + if self.Logradouro is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sLogradouro>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Logradouro), input_name='Logradouro')), namespaceprefix_ , eol_)) + if self.NumeroEndereco is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sNumeroEndereco>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.NumeroEndereco), input_name='NumeroEndereco')), namespaceprefix_ , eol_)) + if self.ComplementoEndereco is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sComplementoEndereco>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ComplementoEndereco), input_name='ComplementoEndereco')), namespaceprefix_ , eol_)) + if self.Bairro is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sBairro>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Bairro), input_name='Bairro')), namespaceprefix_ , eol_)) + if self.Cidade is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sCidade>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Cidade), input_name='Cidade')), namespaceprefix_ , eol_)) + if self.UF is not None: + showIndent(outfile, level, 
pretty_print) + outfile.write('<%sUF>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.UF), input_name='UF')), namespaceprefix_ , eol_)) + if self.CEP is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sCEP>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.CEP, input_name='CEP'), namespaceprefix_ , eol_)) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + pass + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'TipoLogradouro': + TipoLogradouro_ = child_.text + TipoLogradouro_ = self.gds_validate_string(TipoLogradouro_, node, 'TipoLogradouro') + self.TipoLogradouro = TipoLogradouro_ + # validate type tpTipoLogradouro + self.validate_tpTipoLogradouro(self.TipoLogradouro) + elif nodeName_ == 'Logradouro': + Logradouro_ = child_.text + Logradouro_ = self.gds_validate_string(Logradouro_, node, 'Logradouro') + self.Logradouro = Logradouro_ + # validate type tpLogradouro + self.validate_tpLogradouro(self.Logradouro) + elif nodeName_ == 'NumeroEndereco': + NumeroEndereco_ = child_.text + NumeroEndereco_ = self.gds_validate_string(NumeroEndereco_, node, 'NumeroEndereco') + self.NumeroEndereco = NumeroEndereco_ + # validate type tpNumeroEndereco + self.validate_tpNumeroEndereco(self.NumeroEndereco) + elif nodeName_ == 'ComplementoEndereco': + ComplementoEndereco_ = child_.text + ComplementoEndereco_ = self.gds_validate_string(ComplementoEndereco_, node, 'ComplementoEndereco') + self.ComplementoEndereco = ComplementoEndereco_ + # validate type tpComplementoEndereco + self.validate_tpComplementoEndereco(self.ComplementoEndereco) + elif nodeName_ == 'Bairro': + Bairro_ = child_.text + Bairro_ = self.gds_validate_string(Bairro_, node, 'Bairro') + self.Bairro = Bairro_ + # validate type tpBairro + self.validate_tpBairro(self.Bairro) + elif nodeName_ == 'Cidade': + Cidade_ = child_.text + Cidade_ = self.gds_validate_string(Cidade_, node, 'Cidade') + self.Cidade = Cidade_ + # validate type tpCidadeTexto + self.validate_tpCidadeTexto(self.Cidade) + elif nodeName_ == 'UF': + UF_ = child_.text + UF_ = self.gds_validate_string(UF_, node, 'UF') + self.UF = UF_ + # validate type tpUF + self.validate_tpUF(self.UF) + elif nodeName_ == 'CEP' and child_.text: + sval_ = child_.text + try: + ival_ = int(sval_) + except (TypeError, ValueError) as exp: + raise_parse_error(child_, 'requires integer: %s' % exp) + ival_ = self.gds_validate_integer(ival_, node, 'CEP') + self.CEP = ival_ + # validate type tpCEP + self.validate_tpCEP(self.CEP) +# end class tpEndereco + + +class tpPrestador(GeneratedsSuper): + """Dados do Prestador de serviço""" + subclass = None + superclass = None + def __init__(self, CPFCNPJ=None, InscricaoMunicipal=None, RazaoSocialPrestador=None, Endereco=None, Email=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.CPFCNPJ = CPFCNPJ + self.InscricaoMunicipal = InscricaoMunicipal + self.validate_tpInscricaoMunicipal(self.InscricaoMunicipal) + self.RazaoSocialPrestador = RazaoSocialPrestador + self.validate_tpRazaoSocial(self.RazaoSocialPrestador) + self.Endereco = Endereco + self.Email = Email + self.validate_tpEmail(self.Email) + def factory(*args_, **kwargs_): + if 
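A small sketch of filling in the tpEndereco address type defined above and serializing it on its own. The import path and sample data are assumptions; io.StringIO is used only to capture the generated XML as text (Python 3).

    import io
    import nfselib.paulistana.v02.TiposNFTS_v01 as tipos  # assumed module path

    tipos.Validate_simpletypes_ = False  # the validators only run when this flag is true; skipped in this sketch

    endereco = tipos.tpEndereco(
        TipoLogradouro='Rua',
        Logradouro='Exemplo',
        NumeroEndereco='100',
        Bairro='Centro',
        Cidade='Sao Paulo',
        UF='SP',           # tpUF: exactly 2 characters
        CEP=1001000,       # tpCEP: 7 or 8 digits, held as an integer
    )
    buf = io.StringIO()
    endereco.export(buf, level=0, name_='Endereco')
    print(buf.getvalue())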
CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, tpPrestador) + if subclass is not None: + return subclass(*args_, **kwargs_) + if tpPrestador.subclass: + return tpPrestador.subclass(*args_, **kwargs_) + else: + return tpPrestador(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_tpInscricaoMunicipal(self, value): + # Validate type tpInscricaoMunicipal, a restriction on xs:long. + if value is not None and Validate_simpletypes_: + if not self.gds_validate_simple_patterns( + self.validate_tpInscricaoMunicipal_patterns_, value): + warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tpInscricaoMunicipal_patterns_, )) + validate_tpInscricaoMunicipal_patterns_ = [['^[0-9]{8,8}$']] + def validate_tpRazaoSocial(self, value): + # Validate type tpRazaoSocial, a restriction on xs:string. + if value is not None and Validate_simpletypes_: + if len(value) > 75: + warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpRazaoSocial' % {"value" : value.encode("utf-8")} ) + if len(value) < 0: + warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tpRazaoSocial' % {"value" : value.encode("utf-8")} ) + def validate_tpEmail(self, value): + # Validate type tpEmail, a restriction on xs:string. + if value is not None and Validate_simpletypes_: + if len(value) > 75: + warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpEmail' % {"value" : value.encode("utf-8")} ) + if len(value) < 0: + warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tpEmail' % {"value" : value.encode("utf-8")} ) + if not self.gds_validate_simple_patterns( + self.validate_tpEmail_patterns_, value): + warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tpEmail_patterns_, )) + validate_tpEmail_patterns_ = [["^^[A-Za-z0-9_]+([-+.'][A-Za-z0-9_]+)*@[A-Za-z0-9_]+([-.][A-Za-z0-9_]+)*\\.[A-Za-z0-9_]+([-.][A-Za-z0-9_]+)*$$"]] + def hasContent_(self): + if ( + self.CPFCNPJ is not None or + self.InscricaoMunicipal is not None or + self.RazaoSocialPrestador is not None or + self.Endereco is not None or + self.Email is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='tpPrestador', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpPrestador') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='tpPrestador') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='tpPrestador', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='tpPrestador'): + pass + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='tpPrestador', fromsubclass_=False, 
pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.CPFCNPJ is not None: + self.CPFCNPJ.export(outfile, level, namespaceprefix_, namespacedef_='', name_='CPFCNPJ', pretty_print=pretty_print) + if self.InscricaoMunicipal is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sInscricaoMunicipal>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.InscricaoMunicipal, input_name='InscricaoMunicipal'), namespaceprefix_ , eol_)) + if self.RazaoSocialPrestador is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sRazaoSocialPrestador>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.RazaoSocialPrestador), input_name='RazaoSocialPrestador')), namespaceprefix_ , eol_)) + if self.Endereco is not None: + self.Endereco.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Endereco', pretty_print=pretty_print) + if self.Email is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sEmail>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Email), input_name='Email')), namespaceprefix_ , eol_)) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + pass + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'CPFCNPJ': + obj_ = tpCPFCNPJ.factory(parent_object_=self) + obj_.build(child_) + self.CPFCNPJ = obj_ + obj_.original_tagname_ = 'CPFCNPJ' + elif nodeName_ == 'InscricaoMunicipal' and child_.text: + sval_ = child_.text + try: + ival_ = int(sval_) + except (TypeError, ValueError) as exp: + raise_parse_error(child_, 'requires integer: %s' % exp) + ival_ = self.gds_validate_integer(ival_, node, 'InscricaoMunicipal') + self.InscricaoMunicipal = ival_ + # validate type tpInscricaoMunicipal + self.validate_tpInscricaoMunicipal(self.InscricaoMunicipal) + elif nodeName_ == 'RazaoSocialPrestador': + RazaoSocialPrestador_ = child_.text + RazaoSocialPrestador_ = self.gds_validate_string(RazaoSocialPrestador_, node, 'RazaoSocialPrestador') + self.RazaoSocialPrestador = RazaoSocialPrestador_ + # validate type tpRazaoSocial + self.validate_tpRazaoSocial(self.RazaoSocialPrestador) + elif nodeName_ == 'Endereco': + obj_ = tpEndereco.factory(parent_object_=self) + obj_.build(child_) + self.Endereco = obj_ + obj_.original_tagname_ = 'Endereco' + elif nodeName_ == 'Email': + Email_ = child_.text + Email_ = self.gds_validate_string(Email_, node, 'Email') + self.Email = Email_ + # validate type tpEmail + self.validate_tpEmail(self.Email) +# end class tpPrestador + + +class tpTomador(GeneratedsSuper): + """Dados do tomador de serviço""" + subclass = None + superclass = None + def __init__(self, CPFCNPJ=None, RazaoSocial=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.CPFCNPJ = CPFCNPJ + self.RazaoSocial = RazaoSocial + self.validate_tpRazaoSocial(self.RazaoSocial) + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, tpTomador) + if subclass is not None: + return subclass(*args_, **kwargs_) + if tpTomador.subclass: + return tpTomador.subclass(*args_, **kwargs_) + else: + return tpTomador(*args_, **kwargs_) + factory = 
staticmethod(factory) + def validate_tpRazaoSocial(self, value): + # Validate type tpRazaoSocial, a restriction on xs:string. + if value is not None and Validate_simpletypes_: + if len(value) > 75: + warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpRazaoSocial' % {"value" : value.encode("utf-8")} ) + if len(value) < 0: + warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tpRazaoSocial' % {"value" : value.encode("utf-8")} ) + def hasContent_(self): + if ( + self.CPFCNPJ is not None or + self.RazaoSocial is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='tpTomador', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpTomador') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='tpTomador') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='tpTomador', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='tpTomador'): + pass + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='tpTomador', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.CPFCNPJ is not None: + self.CPFCNPJ.export(outfile, level, namespaceprefix_, namespacedef_='', name_='CPFCNPJ', pretty_print=pretty_print) + if self.RazaoSocial is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sRazaoSocial>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.RazaoSocial), input_name='RazaoSocial')), namespaceprefix_ , eol_)) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + pass + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'CPFCNPJ': + obj_ = tpCPFCNPJ.factory(parent_object_=self) + obj_.build(child_) + self.CPFCNPJ = obj_ + obj_.original_tagname_ = 'CPFCNPJ' + elif nodeName_ == 'RazaoSocial': + RazaoSocial_ = child_.text + RazaoSocial_ = self.gds_validate_string(RazaoSocial_, node, 'RazaoSocial') + self.RazaoSocial = RazaoSocial_ + # validate type tpRazaoSocial + self.validate_tpRazaoSocial(self.RazaoSocial) +# end class tpTomador + + +class tpNFTS(GeneratedsSuper): + """Tipo que representa uma NFTS.""" + subclass = None + superclass = None + def __init__(self, TipoDocumento=None, ChaveDocumento=None, DataPrestacao=None, StatusNFTS=None, TributacaoNFTS=None, ValorServicos=None, ValorDeducoes=None, CodigoServico=None, CodigoSubItem=None, AliquotaServicos=None, ISSRetidoTomador=None, ISSRetidoIntermediario=None, DescumpreLeiComplementar1572016=None, 
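The two party types above nest the address type: tpPrestador carries an optional tpEndereco plus an e-mail, while tpTomador holds only the CPF/CNPJ wrapper and a corporate name. A sketch with made-up data and an assumed import path; the CPFCNPJ child uses the tpCPFCNPJ type referenced in buildChildren(), which is defined elsewhere in this module and is left unset here.

    import nfselib.paulistana.v02.TiposNFTS_v01 as tipos  # assumed module path

    tipos.Validate_simpletypes_ = False  # skip the simple-type warnings in this sketch

    prestador = tipos.tpPrestador(
        InscricaoMunicipal=12345678,
        RazaoSocialPrestador='Prestador Exemplo Ltda',
        Endereco=tipos.tpEndereco(Logradouro='Exemplo', Cidade='Sao Paulo', UF='SP'),
        Email='contato@exemplo.com.br',
        # CPFCNPJ: the tpCPFCNPJ wrapper used by buildChildren() above; omitted in this sketch
    )
    tomador = tipos.tpTomador(RazaoSocial='Tomador Exemplo S.A.')
    print(prestador.hasContent_(), tomador.hasContent_())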
Prestador=None, RegimeTributacao=None, DataPagamento=None, Discriminacao=None, TipoNFTS=None, Tomador=None, Assinatura=None, CodigoCEI=None, MatriculaObra=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.TipoDocumento = TipoDocumento + self.validate_tpTipoDocumentoNFTS(self.TipoDocumento) + self.ChaveDocumento = ChaveDocumento + if isinstance(DataPrestacao, BaseStrType_): + initvalue_ = datetime_.datetime.strptime(DataPrestacao, '%Y-%m-%d').date() + else: + initvalue_ = DataPrestacao + self.DataPrestacao = initvalue_ + self.StatusNFTS = StatusNFTS + self.validate_tpStatusNFTS(self.StatusNFTS) + self.TributacaoNFTS = TributacaoNFTS + self.validate_tpTributacaoNFTS(self.TributacaoNFTS) + self.ValorServicos = ValorServicos + self.validate_tpValor(self.ValorServicos) + self.ValorDeducoes = ValorDeducoes + self.validate_tpValor(self.ValorDeducoes) + self.CodigoServico = CodigoServico + self.validate_tpCodigoServico(self.CodigoServico) + self.CodigoSubItem = CodigoSubItem + self.validate_tpCodigoSubitem(self.CodigoSubItem) + self.AliquotaServicos = AliquotaServicos + self.validate_tpAliquota(self.AliquotaServicos) + self.ISSRetidoTomador = ISSRetidoTomador + self.validate_tpISSRetidoTomador(self.ISSRetidoTomador) + self.ISSRetidoIntermediario = ISSRetidoIntermediario + self.validate_tpISSRetidoIntermediario(self.ISSRetidoIntermediario) + self.DescumpreLeiComplementar1572016 = DescumpreLeiComplementar1572016 + self.validate_tpDescumpreLeiComplementar1572016(self.DescumpreLeiComplementar1572016) + self.Prestador = Prestador + self.RegimeTributacao = RegimeTributacao + self.validate_tpRegimeTributacao(self.RegimeTributacao) + if isinstance(DataPagamento, BaseStrType_): + initvalue_ = datetime_.datetime.strptime(DataPagamento, '%Y-%m-%d').date() + else: + initvalue_ = DataPagamento + self.DataPagamento = initvalue_ + self.Discriminacao = Discriminacao + self.validate_tpDiscriminacao(self.Discriminacao) + self.TipoNFTS = TipoNFTS + self.validate_tpTipoNFTS(self.TipoNFTS) + self.Tomador = Tomador + self.Assinatura = Assinatura + self.validate_tpAssinatura(self.Assinatura) + self.CodigoCEI = CodigoCEI + self.validate_tpNumeroNFTS(self.CodigoCEI) + self.MatriculaObra = MatriculaObra + self.validate_tpNumeroNFTS(self.MatriculaObra) + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, tpNFTS) + if subclass is not None: + return subclass(*args_, **kwargs_) + if tpNFTS.subclass: + return tpNFTS.subclass(*args_, **kwargs_) + else: + return tpNFTS(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_tpTipoDocumentoNFTS(self, value): + # Validate type tpTipoDocumentoNFTS, a restriction on xs:string. 
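One detail of the tpNFTS constructor above worth calling out: DataPrestacao and DataPagamento accept either a datetime.date or an ISO 'YYYY-MM-DD' string, which __init__ converts with strptime. A tiny illustration (import path assumed):

    import datetime
    import nfselib.paulistana.v02.TiposNFTS_v01 as tipos  # assumed module path

    a = tipos.tpNFTS(DataPrestacao='2020-07-01')               # the string is parsed into a date
    b = tipos.tpNFTS(DataPrestacao=datetime.date(2020, 7, 1))  # date objects are kept as-is
    assert a.DataPrestacao == b.DataPrestacao == datetime.date(2020, 7, 1)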
+ if value is not None and Validate_simpletypes_: + value = str(value) + enumerations = ['01', '02', '03', '05'] + enumeration_respectee = False + for enum in enumerations: + if value == enum: + enumeration_respectee = True + break + if not enumeration_respectee: + warnings_.warn('Value "%(value)s" does not match xsd enumeration restriction on tpTipoDocumentoNFTS' % {"value" : value.encode("utf-8")} ) + if len(value) > 2: + warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpTipoDocumentoNFTS' % {"value" : value.encode("utf-8")} ) + if len(value) < 2: + warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tpTipoDocumentoNFTS' % {"value" : value.encode("utf-8")} ) + def validate_tpStatusNFTS(self, value): + # Validate type tpStatusNFTS, a restriction on xs:string. + if value is not None and Validate_simpletypes_: + value = str(value) + enumerations = ['N', 'C'] + enumeration_respectee = False + for enum in enumerations: + if value == enum: + enumeration_respectee = True + break + if not enumeration_respectee: + warnings_.warn('Value "%(value)s" does not match xsd enumeration restriction on tpStatusNFTS' % {"value" : value.encode("utf-8")} ) + def validate_tpTributacaoNFTS(self, value): + # Validate type tpTributacaoNFTS, a restriction on xs:string. + if value is not None and Validate_simpletypes_: + value = str(value) + enumerations = ['T', 'I', 'J'] + enumeration_respectee = False + for enum in enumerations: + if value == enum: + enumeration_respectee = True + break + if not enumeration_respectee: + warnings_.warn('Value "%(value)s" does not match xsd enumeration restriction on tpTributacaoNFTS' % {"value" : value.encode("utf-8")} ) + def validate_tpValor(self, value): + # Validate type tpValor, a restriction on xs:decimal. + if value is not None and Validate_simpletypes_: + if value < 0: + warnings_.warn('Value "%(value)s" does not match xsd minInclusive restriction on tpValor' % {"value" : value} ) + if len(str(value)) >= 15: + warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on tpValor' % {"value" : value} ) + if not self.gds_validate_simple_patterns( + self.validate_tpValor_patterns_, value): + warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tpValor_patterns_, )) + validate_tpValor_patterns_ = [['^0|0\\.[0-9]{2}|[1-9]{1}[0-9]{0,12}(\\.[0-9]{0,2})?$']] + def validate_tpCodigoServico(self, value): + # Validate type tpCodigoServico, a restriction on xs:int. + if value is not None and Validate_simpletypes_: + if not self.gds_validate_simple_patterns( + self.validate_tpCodigoServico_patterns_, value): + warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tpCodigoServico_patterns_, )) + validate_tpCodigoServico_patterns_ = [['^[0-9]{4,5}$']] + def validate_tpCodigoSubitem(self, value): + # Validate type tpCodigoSubitem, a restriction on xs:short. + if value is not None and Validate_simpletypes_: + if not self.gds_validate_simple_patterns( + self.validate_tpCodigoSubitem_patterns_, value): + warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tpCodigoSubitem_patterns_, )) + validate_tpCodigoSubitem_patterns_ = [['^[0-9]{3,4}$']] + def validate_tpAliquota(self, value): + # Validate type tpAliquota, a restriction on xs:decimal. 
+ if value is not None and Validate_simpletypes_: + if value < 0: + warnings_.warn('Value "%(value)s" does not match xsd minInclusive restriction on tpAliquota' % {"value" : value} ) + if len(str(value)) >= 5: + warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on tpAliquota' % {"value" : value} ) + def validate_tpISSRetidoTomador(self, value): + # Validate type tpISSRetidoTomador, a restriction on xs:boolean. + if value is not None and Validate_simpletypes_: + pass + def validate_tpISSRetidoIntermediario(self, value): + # Validate type tpISSRetidoIntermediario, a restriction on xs:boolean. + if value is not None and Validate_simpletypes_: + pass + def validate_tpDescumpreLeiComplementar1572016(self, value): + # Validate type tpDescumpreLeiComplementar1572016, a restriction on xs:boolean. + if value is not None and Validate_simpletypes_: + pass + def validate_tpRegimeTributacao(self, value): + # Validate type tpRegimeTributacao, a restriction on xs:short. + if value is not None and Validate_simpletypes_: + if not self.gds_validate_simple_patterns( + self.validate_tpRegimeTributacao_patterns_, value): + warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tpRegimeTributacao_patterns_, )) + validate_tpRegimeTributacao_patterns_ = [['^0|4|5$']] + def validate_tpDiscriminacao(self, value): + # Validate type tpDiscriminacao, a restriction on xs:string. + if value is not None and Validate_simpletypes_: + if len(value) > 2000: + warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpDiscriminacao' % {"value" : value.encode("utf-8")} ) + if len(value) < 1: + warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tpDiscriminacao' % {"value" : value.encode("utf-8")} ) + def validate_tpTipoNFTS(self, value): + # Validate type tpTipoNFTS, a restriction on xs:short. + if value is not None and Validate_simpletypes_: + if not self.gds_validate_simple_patterns( + self.validate_tpTipoNFTS_patterns_, value): + warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tpTipoNFTS_patterns_, )) + validate_tpTipoNFTS_patterns_ = [['^1|2$']] + def validate_tpAssinatura(self, value): + # Validate type tpAssinatura, a restriction on xs:base64Binary. + if value is not None and Validate_simpletypes_: + pass + def validate_tpNumeroNFTS(self, value): + # Validate type tpNumeroNFTS, a restriction on xs:nonNegativeInteger. 
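As the validators above show, XSD restriction violations in this module are reported through warnings_.warn() and never raise, and every check is gated by the module-level Validate_simpletypes_ flag. A sketch of observing that behaviour with the standard warnings machinery (import path assumed):

    import warnings
    import nfselib.paulistana.v02.TiposNFTS_v01 as tipos  # assumed module path

    tipos.Validate_simpletypes_ = True  # make sure the checks run
    nfts = tipos.tpNFTS()

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter('always')
        nfts.validate_tpStatusNFTS('X')    # not in the N/C enumeration
        nfts.validate_tpDiscriminacao('')  # below minLength 1
    print(len(caught))                     # 2: both violations surface as warnings only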
+ if value is not None and Validate_simpletypes_: + if len(str(value)) >= 12: + warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on tpNumeroNFTS' % {"value" : value} ) + def hasContent_(self): + if ( + self.TipoDocumento is not None or + self.ChaveDocumento is not None or + self.DataPrestacao is not None or + self.StatusNFTS is not None or + self.TributacaoNFTS is not None or + self.ValorServicos is not None or + self.ValorDeducoes is not None or + self.CodigoServico is not None or + self.CodigoSubItem is not None or + self.AliquotaServicos is not None or + self.ISSRetidoTomador is not None or + self.ISSRetidoIntermediario is not None or + self.DescumpreLeiComplementar1572016 is not None or + self.Prestador is not None or + self.RegimeTributacao is not None or + self.DataPagamento is not None or + self.Discriminacao is not None or + self.TipoNFTS is not None or + self.Tomador is not None or + self.Assinatura is not None or + self.CodigoCEI is not None or + self.MatriculaObra is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='tpNFTS', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpNFTS') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='tpNFTS') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='tpNFTS', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='tpNFTS'): + pass + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='tpNFTS', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.TipoDocumento is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sTipoDocumento>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.TipoDocumento), input_name='TipoDocumento')), namespaceprefix_ , eol_)) + if self.ChaveDocumento is not None: + self.ChaveDocumento.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ChaveDocumento', pretty_print=pretty_print) + if self.DataPrestacao is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sDataPrestacao>%s%s' % (namespaceprefix_ , self.gds_format_date(self.DataPrestacao, input_name='DataPrestacao'), namespaceprefix_ , eol_)) + if self.StatusNFTS is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sStatusNFTS>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.StatusNFTS), input_name='StatusNFTS')), namespaceprefix_ , eol_)) + if self.TributacaoNFTS is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sTributacaoNFTS>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.TributacaoNFTS), input_name='TributacaoNFTS')), namespaceprefix_ , eol_)) + if self.ValorServicos 
is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sValorServicos>%s%s' % (namespaceprefix_ , self.gds_format_float(self.ValorServicos, input_name='ValorServicos'), namespaceprefix_ , eol_)) + if self.ValorDeducoes is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sValorDeducoes>%s%s' % (namespaceprefix_ , self.gds_format_float(self.ValorDeducoes, input_name='ValorDeducoes'), namespaceprefix_ , eol_)) + if self.CodigoServico is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sCodigoServico>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.CodigoServico, input_name='CodigoServico'), namespaceprefix_ , eol_)) + if self.CodigoSubItem is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sCodigoSubItem>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.CodigoSubItem, input_name='CodigoSubItem'), namespaceprefix_ , eol_)) + if self.AliquotaServicos is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sAliquotaServicos>%s%s' % (namespaceprefix_ , self.gds_format_float(self.AliquotaServicos, input_name='AliquotaServicos'), namespaceprefix_ , eol_)) + if self.ISSRetidoTomador is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sISSRetidoTomador>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.ISSRetidoTomador, input_name='ISSRetidoTomador'), namespaceprefix_ , eol_)) + if self.ISSRetidoIntermediario is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sISSRetidoIntermediario>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.ISSRetidoIntermediario, input_name='ISSRetidoIntermediario'), namespaceprefix_ , eol_)) + if self.DescumpreLeiComplementar1572016 is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sDescumpreLeiComplementar1572016>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.DescumpreLeiComplementar1572016, input_name='DescumpreLeiComplementar1572016'), namespaceprefix_ , eol_)) + if self.Prestador is not None: + self.Prestador.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Prestador', pretty_print=pretty_print) + if self.RegimeTributacao is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sRegimeTributacao>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.RegimeTributacao, input_name='RegimeTributacao'), namespaceprefix_ , eol_)) + if self.DataPagamento is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sDataPagamento>%s%s' % (namespaceprefix_ , self.gds_format_date(self.DataPagamento, input_name='DataPagamento'), namespaceprefix_ , eol_)) + if self.Discriminacao is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sDiscriminacao>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Discriminacao), input_name='Discriminacao')), namespaceprefix_ , eol_)) + if self.TipoNFTS is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sTipoNFTS>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.TipoNFTS, input_name='TipoNFTS'), namespaceprefix_ , eol_)) + if self.Tomador is not None: + self.Tomador.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Tomador', pretty_print=pretty_print) + if self.Assinatura is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sAssinatura>%s%s' % (namespaceprefix_ , self.gds_format_base64(self.Assinatura, input_name='Assinatura'), namespaceprefix_ , eol_)) + if 
self.CodigoCEI is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sCodigoCEI>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.CodigoCEI, input_name='CodigoCEI'), namespaceprefix_ , eol_)) + if self.MatriculaObra is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sMatriculaObra>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.MatriculaObra, input_name='MatriculaObra'), namespaceprefix_ , eol_)) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + pass + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'TipoDocumento': + TipoDocumento_ = child_.text + TipoDocumento_ = self.gds_validate_string(TipoDocumento_, node, 'TipoDocumento') + self.TipoDocumento = TipoDocumento_ + # validate type tpTipoDocumentoNFTS + self.validate_tpTipoDocumentoNFTS(self.TipoDocumento) + elif nodeName_ == 'ChaveDocumento': + obj_ = tpChaveDocumento.factory(parent_object_=self) + obj_.build(child_) + self.ChaveDocumento = obj_ + obj_.original_tagname_ = 'ChaveDocumento' + elif nodeName_ == 'DataPrestacao': + sval_ = child_.text + dval_ = self.gds_parse_date(sval_) + self.DataPrestacao = dval_ + elif nodeName_ == 'StatusNFTS': + StatusNFTS_ = child_.text + StatusNFTS_ = self.gds_validate_string(StatusNFTS_, node, 'StatusNFTS') + self.StatusNFTS = StatusNFTS_ + # validate type tpStatusNFTS + self.validate_tpStatusNFTS(self.StatusNFTS) + elif nodeName_ == 'TributacaoNFTS': + TributacaoNFTS_ = child_.text + TributacaoNFTS_ = self.gds_validate_string(TributacaoNFTS_, node, 'TributacaoNFTS') + self.TributacaoNFTS = TributacaoNFTS_ + # validate type tpTributacaoNFTS + self.validate_tpTributacaoNFTS(self.TributacaoNFTS) + elif nodeName_ == 'ValorServicos' and child_.text: + sval_ = child_.text + try: + fval_ = float(sval_) + except (TypeError, ValueError) as exp: + raise_parse_error(child_, 'requires float or double: %s' % exp) + fval_ = self.gds_validate_float(fval_, node, 'ValorServicos') + self.ValorServicos = fval_ + # validate type tpValor + self.validate_tpValor(self.ValorServicos) + elif nodeName_ == 'ValorDeducoes' and child_.text: + sval_ = child_.text + try: + fval_ = float(sval_) + except (TypeError, ValueError) as exp: + raise_parse_error(child_, 'requires float or double: %s' % exp) + fval_ = self.gds_validate_float(fval_, node, 'ValorDeducoes') + self.ValorDeducoes = fval_ + # validate type tpValor + self.validate_tpValor(self.ValorDeducoes) + elif nodeName_ == 'CodigoServico' and child_.text: + sval_ = child_.text + try: + ival_ = int(sval_) + except (TypeError, ValueError) as exp: + raise_parse_error(child_, 'requires integer: %s' % exp) + ival_ = self.gds_validate_integer(ival_, node, 'CodigoServico') + self.CodigoServico = ival_ + # validate type tpCodigoServico + self.validate_tpCodigoServico(self.CodigoServico) + elif nodeName_ == 'CodigoSubItem' and child_.text: + sval_ = child_.text + try: + ival_ = int(sval_) + except (TypeError, ValueError) as exp: + raise_parse_error(child_, 'requires integer: %s' % exp) + ival_ = self.gds_validate_integer(ival_, node, 'CodigoSubItem') + self.CodigoSubItem = ival_ + # validate type tpCodigoSubitem + self.validate_tpCodigoSubitem(self.CodigoSubItem) + elif nodeName_ == 'AliquotaServicos' and child_.text: + 
sval_ = child_.text + try: + fval_ = float(sval_) + except (TypeError, ValueError) as exp: + raise_parse_error(child_, 'requires float or double: %s' % exp) + fval_ = self.gds_validate_float(fval_, node, 'AliquotaServicos') + self.AliquotaServicos = fval_ + # validate type tpAliquota + self.validate_tpAliquota(self.AliquotaServicos) + elif nodeName_ == 'ISSRetidoTomador': + sval_ = child_.text + if sval_ in ('true', '1'): + ival_ = True + elif sval_ in ('false', '0'): + ival_ = False + else: + raise_parse_error(child_, 'requires boolean') + ival_ = self.gds_validate_boolean(ival_, node, 'ISSRetidoTomador') + self.ISSRetidoTomador = ival_ + # validate type tpISSRetidoTomador + self.validate_tpISSRetidoTomador(self.ISSRetidoTomador) + elif nodeName_ == 'ISSRetidoIntermediario': + sval_ = child_.text + if sval_ in ('true', '1'): + ival_ = True + elif sval_ in ('false', '0'): + ival_ = False + else: + raise_parse_error(child_, 'requires boolean') + ival_ = self.gds_validate_boolean(ival_, node, 'ISSRetidoIntermediario') + self.ISSRetidoIntermediario = ival_ + # validate type tpISSRetidoIntermediario + self.validate_tpISSRetidoIntermediario(self.ISSRetidoIntermediario) + elif nodeName_ == 'DescumpreLeiComplementar1572016': + sval_ = child_.text + if sval_ in ('true', '1'): + ival_ = True + elif sval_ in ('false', '0'): + ival_ = False + else: + raise_parse_error(child_, 'requires boolean') + ival_ = self.gds_validate_boolean(ival_, node, 'DescumpreLeiComplementar1572016') + self.DescumpreLeiComplementar1572016 = ival_ + # validate type tpDescumpreLeiComplementar1572016 + self.validate_tpDescumpreLeiComplementar1572016(self.DescumpreLeiComplementar1572016) + elif nodeName_ == 'Prestador': + obj_ = tpPrestador.factory(parent_object_=self) + obj_.build(child_) + self.Prestador = obj_ + obj_.original_tagname_ = 'Prestador' + elif nodeName_ == 'RegimeTributacao' and child_.text: + sval_ = child_.text + try: + ival_ = int(sval_) + except (TypeError, ValueError) as exp: + raise_parse_error(child_, 'requires integer: %s' % exp) + ival_ = self.gds_validate_integer(ival_, node, 'RegimeTributacao') + self.RegimeTributacao = ival_ + # validate type tpRegimeTributacao + self.validate_tpRegimeTributacao(self.RegimeTributacao) + elif nodeName_ == 'DataPagamento': + sval_ = child_.text + dval_ = self.gds_parse_date(sval_) + self.DataPagamento = dval_ + elif nodeName_ == 'Discriminacao': + Discriminacao_ = child_.text + Discriminacao_ = self.gds_validate_string(Discriminacao_, node, 'Discriminacao') + self.Discriminacao = Discriminacao_ + # validate type tpDiscriminacao + self.validate_tpDiscriminacao(self.Discriminacao) + elif nodeName_ == 'TipoNFTS' and child_.text: + sval_ = child_.text + try: + ival_ = int(sval_) + except (TypeError, ValueError) as exp: + raise_parse_error(child_, 'requires integer: %s' % exp) + ival_ = self.gds_validate_integer(ival_, node, 'TipoNFTS') + self.TipoNFTS = ival_ + # validate type tpTipoNFTS + self.validate_tpTipoNFTS(self.TipoNFTS) + elif nodeName_ == 'Tomador': + obj_ = tpTomador.factory(parent_object_=self) + obj_.build(child_) + self.Tomador = obj_ + obj_.original_tagname_ = 'Tomador' + elif nodeName_ == 'Assinatura': + sval_ = child_.text + if sval_ is not None: + try: + bval_ = base64.b64decode(sval_) + except (TypeError, ValueError) as exp: + raise_parse_error(child_, 'requires base64 encoded string: %s' % exp) + bval_ = self.gds_validate_base64(bval_, node, 'Assinatura') + else: + bval_ = None + self.Assinatura = bval_ + # validate type tpAssinatura + 
self.validate_tpAssinatura(self.Assinatura) + elif nodeName_ == 'CodigoCEI' and child_.text: + sval_ = child_.text + try: + ival_ = int(sval_) + except (TypeError, ValueError) as exp: + raise_parse_error(child_, 'requires integer: %s' % exp) + if ival_ < 0: + raise_parse_error(child_, 'requires nonNegativeInteger') + ival_ = self.gds_validate_integer(ival_, node, 'CodigoCEI') + self.CodigoCEI = ival_ + # validate type tpNumeroNFTS + self.validate_tpNumeroNFTS(self.CodigoCEI) + elif nodeName_ == 'MatriculaObra' and child_.text: + sval_ = child_.text + try: + ival_ = int(sval_) + except (TypeError, ValueError) as exp: + raise_parse_error(child_, 'requires integer: %s' % exp) + if ival_ < 0: + raise_parse_error(child_, 'requires nonNegativeInteger') + ival_ = self.gds_validate_integer(ival_, node, 'MatriculaObra') + self.MatriculaObra = ival_ + # validate type tpNumeroNFTS + self.validate_tpNumeroNFTS(self.MatriculaObra) +# end class tpNFTS + + +class tpNFTSRetorno(GeneratedsSuper): + """Tipo que representa uma NFTS de retorno.""" + subclass = None + superclass = None + def __init__(self, TipoDocumento=None, ChaveDocumento=None, ChaveNFTS=None, DataPrestacao=None, StatusNFTS=None, TributacaoNFTS=None, ValorServicos=None, ValorDeducoes=None, CodigoServico=None, CodigoSubItem=None, AliquotaServicos=None, ISSRetidoTomador=None, ISSRetidoIntermediario=None, Prestador=None, RegimeTributacao=None, DataPagamento=None, Discriminacao=None, Tomador=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.TipoDocumento = TipoDocumento + self.validate_tpTipoDocumentoNFTS(self.TipoDocumento) + self.ChaveDocumento = ChaveDocumento + self.ChaveNFTS = ChaveNFTS + if isinstance(DataPrestacao, BaseStrType_): + initvalue_ = datetime_.datetime.strptime(DataPrestacao, '%Y-%m-%d').date() + else: + initvalue_ = DataPrestacao + self.DataPrestacao = initvalue_ + self.StatusNFTS = StatusNFTS + self.validate_tpStatusNFTS(self.StatusNFTS) + self.TributacaoNFTS = TributacaoNFTS + self.validate_tpTributacaoNFTS(self.TributacaoNFTS) + self.ValorServicos = ValorServicos + self.validate_tpValor(self.ValorServicos) + self.ValorDeducoes = ValorDeducoes + self.validate_tpValor(self.ValorDeducoes) + self.CodigoServico = CodigoServico + self.validate_tpCodigoServico(self.CodigoServico) + self.CodigoSubItem = CodigoSubItem + self.validate_tpCodigoSubitem(self.CodigoSubItem) + self.AliquotaServicos = AliquotaServicos + self.validate_tpAliquota(self.AliquotaServicos) + self.ISSRetidoTomador = ISSRetidoTomador + self.validate_tpISSRetidoTomador(self.ISSRetidoTomador) + self.ISSRetidoIntermediario = ISSRetidoIntermediario + self.validate_tpISSRetidoIntermediario(self.ISSRetidoIntermediario) + self.Prestador = Prestador + self.RegimeTributacao = RegimeTributacao + self.validate_tpRegimeTributacao(self.RegimeTributacao) + if isinstance(DataPagamento, BaseStrType_): + initvalue_ = datetime_.datetime.strptime(DataPagamento, '%Y-%m-%d').date() + else: + initvalue_ = DataPagamento + self.DataPagamento = initvalue_ + self.Discriminacao = Discriminacao + self.validate_tpDiscriminacao(self.Discriminacao) + self.Tomador = Tomador + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, tpNFTSRetorno) + if subclass is not None: + return subclass(*args_, **kwargs_) + if tpNFTSRetorno.subclass: + return tpNFTSRetorno.subclass(*args_, **kwargs_) + else: + return tpNFTSRetorno(*args_, **kwargs_) + factory = 
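Putting the pieces together: a hedged end-to-end sketch that builds a tpNFTS, serializes it with export(), and reads it back through build(). The import path, the sample values and the use of xml.etree for the round trip are assumptions; the element names and method signatures come from the class definitions above (Python 3).

    import datetime
    import io
    from xml.etree import ElementTree as ET
    import nfselib.paulistana.v02.TiposNFTS_v01 as tipos  # assumed module path

    tipos.Validate_simpletypes_ = False  # keep the round trip free of simple-type warnings

    nfts = tipos.tpNFTS(
        TipoDocumento='02',                      # one of the tpTipoDocumentoNFTS codes
        DataPrestacao=datetime.date(2020, 7, 1),
        StatusNFTS='N',
        TributacaoNFTS='T',
        ValorServicos=1500.00,
        AliquotaServicos=5.0,
        CodigoServico=1234,
        ISSRetidoTomador=True,
        TipoNFTS=1,
        Discriminacao='Servico tomado de prestador estabelecido fora do municipio',
        Tomador=tipos.tpTomador(RazaoSocial='Tomador Exemplo Ltda'),
    )

    # Serialize: export() writes the element and its children to any file-like object.
    buf = io.StringIO()
    nfts.export(buf, level=0, name_='NFTS')
    xml_text = buf.getvalue()

    # Deserialize: build() repopulates an instance from an element node and returns it.
    node = ET.fromstring(xml_text)
    roundtrip = tipos.tpNFTS().build(node)
    print(roundtrip.StatusNFTS, roundtrip.ValorServicos, roundtrip.DataPrestacao)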
staticmethod(factory) + def validate_tpTipoDocumentoNFTS(self, value): + # Validate type tpTipoDocumentoNFTS, a restriction on xs:string. + if value is not None and Validate_simpletypes_: + value = str(value) + enumerations = ['01', '02', '03', '05'] + enumeration_respectee = False + for enum in enumerations: + if value == enum: + enumeration_respectee = True + break + if not enumeration_respectee: + warnings_.warn('Value "%(value)s" does not match xsd enumeration restriction on tpTipoDocumentoNFTS' % {"value" : value.encode("utf-8")} ) + if len(value) > 2: + warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpTipoDocumentoNFTS' % {"value" : value.encode("utf-8")} ) + if len(value) < 2: + warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tpTipoDocumentoNFTS' % {"value" : value.encode("utf-8")} ) + def validate_tpStatusNFTS(self, value): + # Validate type tpStatusNFTS, a restriction on xs:string. + if value is not None and Validate_simpletypes_: + value = str(value) + enumerations = ['N', 'C'] + enumeration_respectee = False + for enum in enumerations: + if value == enum: + enumeration_respectee = True + break + if not enumeration_respectee: + warnings_.warn('Value "%(value)s" does not match xsd enumeration restriction on tpStatusNFTS' % {"value" : value.encode("utf-8")} ) + def validate_tpTributacaoNFTS(self, value): + # Validate type tpTributacaoNFTS, a restriction on xs:string. + if value is not None and Validate_simpletypes_: + value = str(value) + enumerations = ['T', 'I', 'J'] + enumeration_respectee = False + for enum in enumerations: + if value == enum: + enumeration_respectee = True + break + if not enumeration_respectee: + warnings_.warn('Value "%(value)s" does not match xsd enumeration restriction on tpTributacaoNFTS' % {"value" : value.encode("utf-8")} ) + def validate_tpValor(self, value): + # Validate type tpValor, a restriction on xs:decimal. + if value is not None and Validate_simpletypes_: + if value < 0: + warnings_.warn('Value "%(value)s" does not match xsd minInclusive restriction on tpValor' % {"value" : value} ) + if len(str(value)) >= 15: + warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on tpValor' % {"value" : value} ) + if not self.gds_validate_simple_patterns( + self.validate_tpValor_patterns_, value): + warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tpValor_patterns_, )) + validate_tpValor_patterns_ = [['^0|0\\.[0-9]{2}|[1-9]{1}[0-9]{0,12}(\\.[0-9]{0,2})?$']] + def validate_tpCodigoServico(self, value): + # Validate type tpCodigoServico, a restriction on xs:int. + if value is not None and Validate_simpletypes_: + if not self.gds_validate_simple_patterns( + self.validate_tpCodigoServico_patterns_, value): + warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tpCodigoServico_patterns_, )) + validate_tpCodigoServico_patterns_ = [['^[0-9]{4,5}$']] + def validate_tpCodigoSubitem(self, value): + # Validate type tpCodigoSubitem, a restriction on xs:short. 
+ if value is not None and Validate_simpletypes_: + if not self.gds_validate_simple_patterns( + self.validate_tpCodigoSubitem_patterns_, value): + warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tpCodigoSubitem_patterns_, )) + validate_tpCodigoSubitem_patterns_ = [['^[0-9]{3,4}$']] + def validate_tpAliquota(self, value): + # Validate type tpAliquota, a restriction on xs:decimal. + if value is not None and Validate_simpletypes_: + if value < 0: + warnings_.warn('Value "%(value)s" does not match xsd minInclusive restriction on tpAliquota' % {"value" : value} ) + if len(str(value)) >= 5: + warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on tpAliquota' % {"value" : value} ) + def validate_tpISSRetidoTomador(self, value): + # Validate type tpISSRetidoTomador, a restriction on xs:boolean. + if value is not None and Validate_simpletypes_: + pass + def validate_tpISSRetidoIntermediario(self, value): + # Validate type tpISSRetidoIntermediario, a restriction on xs:boolean. + if value is not None and Validate_simpletypes_: + pass + def validate_tpRegimeTributacao(self, value): + # Validate type tpRegimeTributacao, a restriction on xs:short. + if value is not None and Validate_simpletypes_: + if not self.gds_validate_simple_patterns( + self.validate_tpRegimeTributacao_patterns_, value): + warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tpRegimeTributacao_patterns_, )) + validate_tpRegimeTributacao_patterns_ = [['^0|4|5$']] + def validate_tpDiscriminacao(self, value): + # Validate type tpDiscriminacao, a restriction on xs:string. + if value is not None and Validate_simpletypes_: + if len(value) > 2000: + warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpDiscriminacao' % {"value" : value.encode("utf-8")} ) + if len(value) < 1: + warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tpDiscriminacao' % {"value" : value.encode("utf-8")} ) + def hasContent_(self): + if ( + self.TipoDocumento is not None or + self.ChaveDocumento is not None or + self.ChaveNFTS is not None or + self.DataPrestacao is not None or + self.StatusNFTS is not None or + self.TributacaoNFTS is not None or + self.ValorServicos is not None or + self.ValorDeducoes is not None or + self.CodigoServico is not None or + self.CodigoSubItem is not None or + self.AliquotaServicos is not None or + self.ISSRetidoTomador is not None or + self.ISSRetidoIntermediario is not None or + self.Prestador is not None or + self.RegimeTributacao is not None or + self.DataPagamento is not None or + self.Discriminacao is not None or + self.Tomador is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='tpNFTSRetorno', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpNFTSRetorno') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='tpNFTSRetorno') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, 
namespaceprefix_, namespacedef_, name_='tpNFTSRetorno', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='tpNFTSRetorno'): + pass + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='tpNFTSRetorno', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.TipoDocumento is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sTipoDocumento>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.TipoDocumento), input_name='TipoDocumento')), namespaceprefix_ , eol_)) + if self.ChaveDocumento is not None: + self.ChaveDocumento.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ChaveDocumento', pretty_print=pretty_print) + if self.ChaveNFTS is not None: + self.ChaveNFTS.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ChaveNFTS', pretty_print=pretty_print) + if self.DataPrestacao is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sDataPrestacao>%s%s' % (namespaceprefix_ , self.gds_format_date(self.DataPrestacao, input_name='DataPrestacao'), namespaceprefix_ , eol_)) + if self.StatusNFTS is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sStatusNFTS>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.StatusNFTS), input_name='StatusNFTS')), namespaceprefix_ , eol_)) + if self.TributacaoNFTS is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sTributacaoNFTS>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.TributacaoNFTS), input_name='TributacaoNFTS')), namespaceprefix_ , eol_)) + if self.ValorServicos is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sValorServicos>%s%s' % (namespaceprefix_ , self.gds_format_float(self.ValorServicos, input_name='ValorServicos'), namespaceprefix_ , eol_)) + if self.ValorDeducoes is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sValorDeducoes>%s%s' % (namespaceprefix_ , self.gds_format_float(self.ValorDeducoes, input_name='ValorDeducoes'), namespaceprefix_ , eol_)) + if self.CodigoServico is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sCodigoServico>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.CodigoServico, input_name='CodigoServico'), namespaceprefix_ , eol_)) + if self.CodigoSubItem is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sCodigoSubItem>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.CodigoSubItem, input_name='CodigoSubItem'), namespaceprefix_ , eol_)) + if self.AliquotaServicos is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sAliquotaServicos>%s%s' % (namespaceprefix_ , self.gds_format_float(self.AliquotaServicos, input_name='AliquotaServicos'), namespaceprefix_ , eol_)) + if self.ISSRetidoTomador is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sISSRetidoTomador>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.ISSRetidoTomador, input_name='ISSRetidoTomador'), namespaceprefix_ , eol_)) + if self.ISSRetidoIntermediario is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sISSRetidoIntermediario>%s%s' % (namespaceprefix_ , 
self.gds_format_boolean(self.ISSRetidoIntermediario, input_name='ISSRetidoIntermediario'), namespaceprefix_ , eol_)) + if self.Prestador is not None: + self.Prestador.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Prestador', pretty_print=pretty_print) + if self.RegimeTributacao is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sRegimeTributacao>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.RegimeTributacao, input_name='RegimeTributacao'), namespaceprefix_ , eol_)) + if self.DataPagamento is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sDataPagamento>%s%s' % (namespaceprefix_ , self.gds_format_date(self.DataPagamento, input_name='DataPagamento'), namespaceprefix_ , eol_)) + if self.Discriminacao is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sDiscriminacao>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Discriminacao), input_name='Discriminacao')), namespaceprefix_ , eol_)) + if self.Tomador is not None: + self.Tomador.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Tomador', pretty_print=pretty_print) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + pass + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'TipoDocumento': + TipoDocumento_ = child_.text + TipoDocumento_ = self.gds_validate_string(TipoDocumento_, node, 'TipoDocumento') + self.TipoDocumento = TipoDocumento_ + # validate type tpTipoDocumentoNFTS + self.validate_tpTipoDocumentoNFTS(self.TipoDocumento) + elif nodeName_ == 'ChaveDocumento': + obj_ = tpChaveDocumento.factory(parent_object_=self) + obj_.build(child_) + self.ChaveDocumento = obj_ + obj_.original_tagname_ = 'ChaveDocumento' + elif nodeName_ == 'ChaveNFTS': + obj_ = tpChaveNFTS.factory(parent_object_=self) + obj_.build(child_) + self.ChaveNFTS = obj_ + obj_.original_tagname_ = 'ChaveNFTS' + elif nodeName_ == 'DataPrestacao': + sval_ = child_.text + dval_ = self.gds_parse_date(sval_) + self.DataPrestacao = dval_ + elif nodeName_ == 'StatusNFTS': + StatusNFTS_ = child_.text + StatusNFTS_ = self.gds_validate_string(StatusNFTS_, node, 'StatusNFTS') + self.StatusNFTS = StatusNFTS_ + # validate type tpStatusNFTS + self.validate_tpStatusNFTS(self.StatusNFTS) + elif nodeName_ == 'TributacaoNFTS': + TributacaoNFTS_ = child_.text + TributacaoNFTS_ = self.gds_validate_string(TributacaoNFTS_, node, 'TributacaoNFTS') + self.TributacaoNFTS = TributacaoNFTS_ + # validate type tpTributacaoNFTS + self.validate_tpTributacaoNFTS(self.TributacaoNFTS) + elif nodeName_ == 'ValorServicos' and child_.text: + sval_ = child_.text + try: + fval_ = float(sval_) + except (TypeError, ValueError) as exp: + raise_parse_error(child_, 'requires float or double: %s' % exp) + fval_ = self.gds_validate_float(fval_, node, 'ValorServicos') + self.ValorServicos = fval_ + # validate type tpValor + self.validate_tpValor(self.ValorServicos) + elif nodeName_ == 'ValorDeducoes' and child_.text: + sval_ = child_.text + try: + fval_ = float(sval_) + except (TypeError, ValueError) as exp: + raise_parse_error(child_, 'requires float or double: %s' % exp) + fval_ = self.gds_validate_float(fval_, node, 'ValorDeducoes') + self.ValorDeducoes = fval_ + # validate 
type tpValor + self.validate_tpValor(self.ValorDeducoes) + elif nodeName_ == 'CodigoServico' and child_.text: + sval_ = child_.text + try: + ival_ = int(sval_) + except (TypeError, ValueError) as exp: + raise_parse_error(child_, 'requires integer: %s' % exp) + ival_ = self.gds_validate_integer(ival_, node, 'CodigoServico') + self.CodigoServico = ival_ + # validate type tpCodigoServico + self.validate_tpCodigoServico(self.CodigoServico) + elif nodeName_ == 'CodigoSubItem' and child_.text: + sval_ = child_.text + try: + ival_ = int(sval_) + except (TypeError, ValueError) as exp: + raise_parse_error(child_, 'requires integer: %s' % exp) + ival_ = self.gds_validate_integer(ival_, node, 'CodigoSubItem') + self.CodigoSubItem = ival_ + # validate type tpCodigoSubitem + self.validate_tpCodigoSubitem(self.CodigoSubItem) + elif nodeName_ == 'AliquotaServicos' and child_.text: + sval_ = child_.text + try: + fval_ = float(sval_) + except (TypeError, ValueError) as exp: + raise_parse_error(child_, 'requires float or double: %s' % exp) + fval_ = self.gds_validate_float(fval_, node, 'AliquotaServicos') + self.AliquotaServicos = fval_ + # validate type tpAliquota + self.validate_tpAliquota(self.AliquotaServicos) + elif nodeName_ == 'ISSRetidoTomador': + sval_ = child_.text + if sval_ in ('true', '1'): + ival_ = True + elif sval_ in ('false', '0'): + ival_ = False + else: + raise_parse_error(child_, 'requires boolean') + ival_ = self.gds_validate_boolean(ival_, node, 'ISSRetidoTomador') + self.ISSRetidoTomador = ival_ + # validate type tpISSRetidoTomador + self.validate_tpISSRetidoTomador(self.ISSRetidoTomador) + elif nodeName_ == 'ISSRetidoIntermediario': + sval_ = child_.text + if sval_ in ('true', '1'): + ival_ = True + elif sval_ in ('false', '0'): + ival_ = False + else: + raise_parse_error(child_, 'requires boolean') + ival_ = self.gds_validate_boolean(ival_, node, 'ISSRetidoIntermediario') + self.ISSRetidoIntermediario = ival_ + # validate type tpISSRetidoIntermediario + self.validate_tpISSRetidoIntermediario(self.ISSRetidoIntermediario) + elif nodeName_ == 'Prestador': + obj_ = tpPrestador.factory(parent_object_=self) + obj_.build(child_) + self.Prestador = obj_ + obj_.original_tagname_ = 'Prestador' + elif nodeName_ == 'RegimeTributacao' and child_.text: + sval_ = child_.text + try: + ival_ = int(sval_) + except (TypeError, ValueError) as exp: + raise_parse_error(child_, 'requires integer: %s' % exp) + ival_ = self.gds_validate_integer(ival_, node, 'RegimeTributacao') + self.RegimeTributacao = ival_ + # validate type tpRegimeTributacao + self.validate_tpRegimeTributacao(self.RegimeTributacao) + elif nodeName_ == 'DataPagamento': + sval_ = child_.text + dval_ = self.gds_parse_date(sval_) + self.DataPagamento = dval_ + elif nodeName_ == 'Discriminacao': + Discriminacao_ = child_.text + Discriminacao_ = self.gds_validate_string(Discriminacao_, node, 'Discriminacao') + self.Discriminacao = Discriminacao_ + # validate type tpDiscriminacao + self.validate_tpDiscriminacao(self.Discriminacao) + elif nodeName_ == 'Tomador': + obj_ = tpTomador.factory(parent_object_=self) + obj_.build(child_) + self.Tomador = obj_ + obj_.original_tagname_ = 'Tomador' +# end class tpNFTSRetorno + + +class tpRetornoCPOM(GeneratedsSuper): + """Lista de mensagens de retorno""" + subclass = None + superclass = None + def __init__(self, Alerta=None, Erro=None, DetalheCPOM=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + if Alerta is None: + self.Alerta = [] + else: + 
self.Alerta = Alerta + if Erro is None: + self.Erro = [] + else: + self.Erro = Erro + self.DetalheCPOM = DetalheCPOM + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, tpRetornoCPOM) + if subclass is not None: + return subclass(*args_, **kwargs_) + if tpRetornoCPOM.subclass: + return tpRetornoCPOM.subclass(*args_, **kwargs_) + else: + return tpRetornoCPOM(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): + if ( + self.Alerta or + self.Erro or + self.DetalheCPOM is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='tpRetornoCPOM', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpRetornoCPOM') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='tpRetornoCPOM') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='tpRetornoCPOM', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='tpRetornoCPOM'): + pass + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='tpRetornoCPOM', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + for Alerta_ in self.Alerta: + Alerta_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Alerta', pretty_print=pretty_print) + for Erro_ in self.Erro: + Erro_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Erro', pretty_print=pretty_print) + if self.DetalheCPOM is not None: + self.DetalheCPOM.export(outfile, level, namespaceprefix_, namespacedef_='', name_='DetalheCPOM', pretty_print=pretty_print) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + pass + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'Alerta': + obj_ = tpEvento.factory(parent_object_=self) + obj_.build(child_) + self.Alerta.append(obj_) + obj_.original_tagname_ = 'Alerta' + elif nodeName_ == 'Erro': + obj_ = tpEvento.factory(parent_object_=self) + obj_.build(child_) + self.Erro.append(obj_) + obj_.original_tagname_ = 'Erro' + elif nodeName_ == 'DetalheCPOM': + obj_ = tpDetalheCPOMRetorno.factory(parent_object_=self) + obj_.build(child_) + self.DetalheCPOM = obj_ + obj_.original_tagname_ = 'DetalheCPOM' +# end class tpRetornoCPOM + + +class tpDetalheCPOMRetorno(GeneratedsSuper): + subclass = None + superclass = None + def __init__(self, CPFCNPJPrestador=None, SituacaoCPOM=None, Servicos=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + 
self.CPFCNPJPrestador = CPFCNPJPrestador + self.SituacaoCPOM = SituacaoCPOM + self.validate_tpSituacaoCPOM(self.SituacaoCPOM) + if Servicos is None: + self.Servicos = [] + else: + self.Servicos = Servicos + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, tpDetalheCPOMRetorno) + if subclass is not None: + return subclass(*args_, **kwargs_) + if tpDetalheCPOMRetorno.subclass: + return tpDetalheCPOMRetorno.subclass(*args_, **kwargs_) + else: + return tpDetalheCPOMRetorno(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_tpSituacaoCPOM(self, value): + # Validate type tpSituacaoCPOM, a restriction on xs:string. + if value is not None and Validate_simpletypes_: + if len(value) > 200: + warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpSituacaoCPOM' % {"value" : value.encode("utf-8")} ) + def hasContent_(self): + if ( + self.CPFCNPJPrestador is not None or + self.SituacaoCPOM is not None or + self.Servicos + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='tpDetalheCPOMRetorno', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpDetalheCPOMRetorno') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='tpDetalheCPOMRetorno') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='tpDetalheCPOMRetorno', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='tpDetalheCPOMRetorno'): + pass + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='tpDetalheCPOMRetorno', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.CPFCNPJPrestador is not None: + self.CPFCNPJPrestador.export(outfile, level, namespaceprefix_, namespacedef_='', name_='CPFCNPJPrestador', pretty_print=pretty_print) + if self.SituacaoCPOM is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sSituacaoCPOM>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SituacaoCPOM), input_name='SituacaoCPOM')), namespaceprefix_ , eol_)) + for Servicos_ in self.Servicos: + Servicos_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Servicos', pretty_print=pretty_print) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + pass + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'CPFCNPJPrestador': + obj_ = tpCPFCNPJ.factory(parent_object_=self) + obj_.build(child_) + self.CPFCNPJPrestador = obj_ + 
obj_.original_tagname_ = 'CPFCNPJPrestador' + elif nodeName_ == 'SituacaoCPOM': + SituacaoCPOM_ = child_.text + SituacaoCPOM_ = self.gds_validate_string(SituacaoCPOM_, node, 'SituacaoCPOM') + self.SituacaoCPOM = SituacaoCPOM_ + # validate type tpSituacaoCPOM + self.validate_tpSituacaoCPOM(self.SituacaoCPOM) + elif nodeName_ == 'Servicos': + obj_ = tpServicos.factory(parent_object_=self) + obj_.build(child_) + self.Servicos.append(obj_) + obj_.original_tagname_ = 'Servicos' +# end class tpDetalheCPOMRetorno + + +class tpServicos(GeneratedsSuper): + """Código e descriçao da atividade""" + subclass = None + superclass = None + def __init__(self, CodigoServico=None, DescricaoServico=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.CodigoServico = CodigoServico + self.validate_tpCodigoServico(self.CodigoServico) + self.DescricaoServico = DescricaoServico + self.validate_tpDescricaoServico(self.DescricaoServico) + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, tpServicos) + if subclass is not None: + return subclass(*args_, **kwargs_) + if tpServicos.subclass: + return tpServicos.subclass(*args_, **kwargs_) + else: + return tpServicos(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_tpCodigoServico(self, value): + # Validate type tpCodigoServico, a restriction on xs:int. + if value is not None and Validate_simpletypes_: + if not self.gds_validate_simple_patterns( + self.validate_tpCodigoServico_patterns_, value): + warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tpCodigoServico_patterns_, )) + validate_tpCodigoServico_patterns_ = [['^[0-9]{4,5}$']] + def validate_tpDescricaoServico(self, value): + # Validate type tpDescricaoServico, a restriction on xs:string. 
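+ # Illustrative limit: service descriptions of up to 250 characters pass; longer values only raise a warning via warnings_.warn rather than an error.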
+ if value is not None and Validate_simpletypes_: + if len(value) > 250: + warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpDescricaoServico' % {"value" : value.encode("utf-8")} ) + def hasContent_(self): + if ( + self.CodigoServico is not None or + self.DescricaoServico is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='tpServicos', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpServicos') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='tpServicos') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='tpServicos', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='tpServicos'): + pass + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='tpServicos', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.CodigoServico is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sCodigoServico>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.CodigoServico, input_name='CodigoServico'), namespaceprefix_ , eol_)) + if self.DescricaoServico is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sDescricaoServico>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.DescricaoServico), input_name='DescricaoServico')), namespaceprefix_ , eol_)) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + pass + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'CodigoServico' and child_.text: + sval_ = child_.text + try: + ival_ = int(sval_) + except (TypeError, ValueError) as exp: + raise_parse_error(child_, 'requires integer: %s' % exp) + ival_ = self.gds_validate_integer(ival_, node, 'CodigoServico') + self.CodigoServico = ival_ + # validate type tpCodigoServico + self.validate_tpCodigoServico(self.CodigoServico) + elif nodeName_ == 'DescricaoServico': + DescricaoServico_ = child_.text + DescricaoServico_ = self.gds_validate_string(DescricaoServico_, node, 'DescricaoServico') + self.DescricaoServico = DescricaoServico_ + # validate type tpDescricaoServico + self.validate_tpDescricaoServico(self.DescricaoServico) +# end class tpServicos + + +class tpDetalheNFSERetorno(GeneratedsSuper): + subclass = None + superclass = None + def __init__(self, CPFCNPJPrestador=None, ListaInscricaoMunicipal=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.CPFCNPJPrestador = 
CPFCNPJPrestador + if ListaInscricaoMunicipal is None: + self.ListaInscricaoMunicipal = [] + else: + self.ListaInscricaoMunicipal = ListaInscricaoMunicipal + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, tpDetalheNFSERetorno) + if subclass is not None: + return subclass(*args_, **kwargs_) + if tpDetalheNFSERetorno.subclass: + return tpDetalheNFSERetorno.subclass(*args_, **kwargs_) + else: + return tpDetalheNFSERetorno(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): + if ( + self.CPFCNPJPrestador is not None or + self.ListaInscricaoMunicipal + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='tpDetalheNFSERetorno', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpDetalheNFSERetorno') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='tpDetalheNFSERetorno') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='tpDetalheNFSERetorno', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='tpDetalheNFSERetorno'): + pass + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='tpDetalheNFSERetorno', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.CPFCNPJPrestador is not None: + self.CPFCNPJPrestador.export(outfile, level, namespaceprefix_, namespacedef_='', name_='CPFCNPJPrestador', pretty_print=pretty_print) + for ListaInscricaoMunicipal_ in self.ListaInscricaoMunicipal: + ListaInscricaoMunicipal_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ListaInscricaoMunicipal', pretty_print=pretty_print) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + pass + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'CPFCNPJPrestador': + obj_ = tpCPFCNPJ.factory(parent_object_=self) + obj_.build(child_) + self.CPFCNPJPrestador = obj_ + obj_.original_tagname_ = 'CPFCNPJPrestador' + elif nodeName_ == 'ListaInscricaoMunicipal': + obj_ = tpListaInscricaoMunicipal.factory(parent_object_=self) + obj_.build(child_) + self.ListaInscricaoMunicipal.append(obj_) + obj_.original_tagname_ = 'ListaInscricaoMunicipal' +# end class tpDetalheNFSERetorno + + +class tpListaInscricaoMunicipal(GeneratedsSuper): + subclass = None + superclass = None + def __init__(self, InscricaoMunicipal=None, SituacaoInscricaoMunicipal=None, SituacaoEmissaoNFSE=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = 
kwargs_.get('parent_object_') + self.InscricaoMunicipal = InscricaoMunicipal + self.validate_tpInscricaoMunicipal(self.InscricaoMunicipal) + self.SituacaoInscricaoMunicipal = SituacaoInscricaoMunicipal + self.validate_tpSituacaoInscricaoMunicipal(self.SituacaoInscricaoMunicipal) + self.SituacaoEmissaoNFSE = SituacaoEmissaoNFSE + self.validate_tpSituacaoEmissaoNFSE(self.SituacaoEmissaoNFSE) + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, tpListaInscricaoMunicipal) + if subclass is not None: + return subclass(*args_, **kwargs_) + if tpListaInscricaoMunicipal.subclass: + return tpListaInscricaoMunicipal.subclass(*args_, **kwargs_) + else: + return tpListaInscricaoMunicipal(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_tpInscricaoMunicipal(self, value): + # Validate type tpInscricaoMunicipal, a restriction on xs:long. + if value is not None and Validate_simpletypes_: + if not self.gds_validate_simple_patterns( + self.validate_tpInscricaoMunicipal_patterns_, value): + warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tpInscricaoMunicipal_patterns_, )) + validate_tpInscricaoMunicipal_patterns_ = [['^[0-9]{8,8}$']] + def validate_tpSituacaoInscricaoMunicipal(self, value): + # Validate type tpSituacaoInscricaoMunicipal, a restriction on xs:string. + if value is not None and Validate_simpletypes_: + if len(value) > 200: + warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpSituacaoInscricaoMunicipal' % {"value" : value.encode("utf-8")} ) + def validate_tpSituacaoEmissaoNFSE(self, value): + # Validate type tpSituacaoEmissaoNFSE, a restriction on xs:string. + if value is not None and Validate_simpletypes_: + if len(value) > 200: + warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpSituacaoEmissaoNFSE' % {"value" : value.encode("utf-8")} ) + def hasContent_(self): + if ( + self.InscricaoMunicipal is not None or + self.SituacaoInscricaoMunicipal is not None or + self.SituacaoEmissaoNFSE is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='tpListaInscricaoMunicipal', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpListaInscricaoMunicipal') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='tpListaInscricaoMunicipal') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='tpListaInscricaoMunicipal', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='tpListaInscricaoMunicipal'): + pass + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='tpListaInscricaoMunicipal', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ 
= '' + if self.InscricaoMunicipal is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sInscricaoMunicipal>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.InscricaoMunicipal, input_name='InscricaoMunicipal'), namespaceprefix_ , eol_)) + if self.SituacaoInscricaoMunicipal is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sSituacaoInscricaoMunicipal>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SituacaoInscricaoMunicipal), input_name='SituacaoInscricaoMunicipal')), namespaceprefix_ , eol_)) + if self.SituacaoEmissaoNFSE is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sSituacaoEmissaoNFSE>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SituacaoEmissaoNFSE), input_name='SituacaoEmissaoNFSE')), namespaceprefix_ , eol_)) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + pass + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'InscricaoMunicipal' and child_.text: + sval_ = child_.text + try: + ival_ = int(sval_) + except (TypeError, ValueError) as exp: + raise_parse_error(child_, 'requires integer: %s' % exp) + ival_ = self.gds_validate_integer(ival_, node, 'InscricaoMunicipal') + self.InscricaoMunicipal = ival_ + # validate type tpInscricaoMunicipal + self.validate_tpInscricaoMunicipal(self.InscricaoMunicipal) + elif nodeName_ == 'SituacaoInscricaoMunicipal': + SituacaoInscricaoMunicipal_ = child_.text + SituacaoInscricaoMunicipal_ = self.gds_validate_string(SituacaoInscricaoMunicipal_, node, 'SituacaoInscricaoMunicipal') + self.SituacaoInscricaoMunicipal = SituacaoInscricaoMunicipal_ + # validate type tpSituacaoInscricaoMunicipal + self.validate_tpSituacaoInscricaoMunicipal(self.SituacaoInscricaoMunicipal) + elif nodeName_ == 'SituacaoEmissaoNFSE': + SituacaoEmissaoNFSE_ = child_.text + SituacaoEmissaoNFSE_ = self.gds_validate_string(SituacaoEmissaoNFSE_, node, 'SituacaoEmissaoNFSE') + self.SituacaoEmissaoNFSE = SituacaoEmissaoNFSE_ + # validate type tpSituacaoEmissaoNFSE + self.validate_tpSituacaoEmissaoNFSE(self.SituacaoEmissaoNFSE) +# end class tpListaInscricaoMunicipal + + +class tpRetornoEmissaoNFSE(GeneratedsSuper): + """Lista de mensagens de retorno""" + subclass = None + superclass = None + def __init__(self, Alerta=None, Erro=None, DetalheEmissaoNFSE=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + if Alerta is None: + self.Alerta = [] + else: + self.Alerta = Alerta + if Erro is None: + self.Erro = [] + else: + self.Erro = Erro + self.DetalheEmissaoNFSE = DetalheEmissaoNFSE + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, tpRetornoEmissaoNFSE) + if subclass is not None: + return subclass(*args_, **kwargs_) + if tpRetornoEmissaoNFSE.subclass: + return tpRetornoEmissaoNFSE.subclass(*args_, **kwargs_) + else: + return tpRetornoEmissaoNFSE(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): + if ( + self.Alerta or + self.Erro or + self.DetalheEmissaoNFSE is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', 
namespacedef_='', name_='tpRetornoEmissaoNFSE', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpRetornoEmissaoNFSE') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='tpRetornoEmissaoNFSE') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='tpRetornoEmissaoNFSE', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='tpRetornoEmissaoNFSE'): + pass + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='tpRetornoEmissaoNFSE', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + for Alerta_ in self.Alerta: + Alerta_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Alerta', pretty_print=pretty_print) + for Erro_ in self.Erro: + Erro_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Erro', pretty_print=pretty_print) + if self.DetalheEmissaoNFSE is not None: + self.DetalheEmissaoNFSE.export(outfile, level, namespaceprefix_, namespacedef_='', name_='DetalheEmissaoNFSE', pretty_print=pretty_print) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + pass + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'Alerta': + obj_ = tpEvento.factory(parent_object_=self) + obj_.build(child_) + self.Alerta.append(obj_) + obj_.original_tagname_ = 'Alerta' + elif nodeName_ == 'Erro': + obj_ = tpEvento.factory(parent_object_=self) + obj_.build(child_) + self.Erro.append(obj_) + obj_.original_tagname_ = 'Erro' + elif nodeName_ == 'DetalheEmissaoNFSE': + obj_ = tpDetalheNFSERetorno.factory(parent_object_=self) + obj_.build(child_) + self.DetalheEmissaoNFSE = obj_ + obj_.original_tagname_ = 'DetalheEmissaoNFSE' +# end class tpRetornoEmissaoNFSE + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + rootClass = GDSClassesMapping.get(tag) + if rootClass is None: + rootClass = globals().get(tag) + return tag, rootClass + + +def parse(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'tpEvento' + rootClass = tpEvento + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. 
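+ # Illustrative use of this helper (file name hypothetical):
+ #     rootObj = parse('RetornoConsultaNFTS.xml', silence=True)
+ # With silence=True the re-export to stdout is skipped and only the built object tree is returned.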
+ doc = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='xmlns:tipos="http://www.prefeitura.sp.gov.br/nfts/tipos"', + pretty_print=True) + return rootObj + + +def parseEtree(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'tpEvento' + rootClass = tpEvento + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + doc = None + mapping = {} + rootElement = rootObj.to_etree(None, name_=rootTag, mapping_=mapping) + reverse_mapping = rootObj.gds_reverse_node_mapping(mapping) + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(content) + sys.stdout.write('\n') + return rootObj, rootElement, mapping, reverse_mapping + + +def parseString(inString, silence=False): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. + ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'tpEvento' + rootClass = tpEvento + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='xmlns:tipos="http://www.prefeitura.sp.gov.br/nfts/tipos"') + return rootObj + + +def parseLiteral(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'tpEvento' + rootClass = tpEvento + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + doc = None + if not silence: + sys.stdout.write('#from TiposNFTS_v01 import *\n\n') + sys.stdout.write('import TiposNFTS_v01 as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + + +__all__ = [ + "tpCPFCNPJ", + "tpCabecalho", + "tpCabecalhoRetorno", + "tpChaveDocumento", + "tpChaveNFTS", + "tpDetalheCPOMRetorno", + "tpDetalheNFSERetorno", + "tpEndereco", + "tpEvento", + "tpIdentificacaoDocumento", + "tpIdentificacaoNFTS", + "tpInformacoesLoteNFTS", + "tpListaInscricaoMunicipal", + "tpListaRetornoConsultaNFTS", + "tpListaRetornoLote", + "tpListaRetornoNFTS", + "tpNFTS", + "tpNFTSRetorno", + "tpPrestador", + "tpRemetente", + "tpRemetente_ComCCM", + "tpRetornoCPOM", + "tpRetornoEmissaoNFSE", + "tpServicos", + "tpTomador" +] diff --git a/nfselib/paulistana/v02/TiposNFe_v01.py b/nfselib/paulistana/v02/TiposNFe_v01.py new file mode 100644 index 0000000..8b2f561 --- /dev/null +++ b/nfselib/paulistana/v02/TiposNFe_v01.py @@ -0,0 +1,3466 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Jul 22 06:40:30 2020 by generateDS.py version 2.30.15. 
+# Python 3.8.3 (default, May 17 2020, 18:15:42) [GCC 10.1.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('--no-collect-includes', '') +# ('--use-getter-setter', 'none') +# ('-f', '') +# ('-o', './paulistanalib/v02/TiposNFe_v01.py') +# +# Command line arguments: +# ./process_includes/TiposNFe_v01.xsd +# +# Command line: +# /home/mileo/Projects/oca12/bin/erpbrasil-edoc-gen-generate-python --no-namespace-defs --no-collect-includes --use-getter-setter="none" -f -o "./paulistanalib/v02/TiposNFe_v01.py" ./process_includes/TiposNFe_v01.xsd +# +# Current working directory (os.getcwd()): +# schemas +# + +import os +import sys +import re as re_ +import base64 +import datetime as datetime_ +import warnings as warnings_ +try: + from lxml import etree as etree_ +except ImportError: + from xml.etree import ElementTree as etree_ + + +Validate_simpletypes_ = True +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ImportError: + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ImportError: + GenerateDSNamespaceTypePrefixes_ = {} + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ImportError as exp: + + class GeneratedsSuper(object): + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data) + def gds_validate_base64(self, input_data, node=None, input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % input_data + def gds_validate_integer(self, input_data, node=None, input_name=''): + return input_data + def gds_format_integer_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integers') + return values + def gds_format_float(self, input_data, input_name=''): + return ('%.15f' % input_data).rstrip('0') + def gds_validate_float(self, input_data, node=None, input_name=''): + return input_data + def gds_format_float_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of floats') + return values + def gds_format_double(self, input_data, input_name=''): + return '%e' % input_data + def gds_validate_double(self, input_data, node=None, input_name=''): + return input_data + def gds_format_double_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of doubles') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_validate_boolean(self, input_data, node=None, input_name=''): + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, 
input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + if value not in ('true', '1', 'false', '0', ): + raise_parse_error( + node, + 'Requires sequence of booleans ' + '("true", "1", "false", "0")') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' 
+ time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. 
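+ # Illustrative behaviour (inputs hypothetical): gds_validate_simple_patterns([['^[0-9]{4,5}$']], '01406') returns True,
+ # while '123' or '14-06' return False, since at least one pattern must match the whole target string.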
+ found1 = True + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + return None + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + if type(self) != type(other): + return False + return self.__dict__ == other.__dict__ + def __ne__(self, other): + return not self.__eq__(other) + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. 
+#
+
+## from IPython.Shell import IPShellEmbed
+## args = ''
+## ipshell = IPShellEmbed(args,
+##     banner = 'Dropping into IPython',
+##     exit_msg = 'Leaving Interpreter, back to program.')
+
+# Then use the following line where and when you want to drop into the
+# IPython shell:
+#    ipshell('<some message> -- Entering ipshell.\nHit Ctrl-D to exit')
+
+#
+# Globals
+#
+
+ExternalEncoding = ''
+Tag_pattern_ = re_.compile(r'({.*})?(.*)')
+String_cleanup_pat_ = re_.compile(r"[\n\r\s]+")
+Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)')
+CDATA_pattern_ = re_.compile(r"<!\[CDATA\[.*?\]\]>", re_.DOTALL)
+
+# Change this to redirect the generated superclass module to use a
+# specific subclass module.
+CurrentSubclassModule_ = None
+
+#
+# Support/utility functions.
+#
+
+
+def showIndent(outfile, level, pretty_print=True):
+    if pretty_print:
+        for idx in range(level):
+            outfile.write('    ')
+
+
+def quote_xml(inStr):
+    "Escape markup chars, but do not modify CDATA sections."
+    if not inStr:
+        return ''
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s2 = ''
+    pos = 0
+    matchobjects = CDATA_pattern_.finditer(s1)
+    for mo in matchobjects:
+        s3 = s1[pos:mo.start()]
+        s2 += quote_xml_aux(s3)
+        s2 += s1[mo.start():mo.end()]
+        pos = mo.end()
+    s3 = s1[pos:]
+    s2 += quote_xml_aux(s3)
+    return s2
+
+
+def quote_xml_aux(inStr):
+    s1 = inStr.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    return s1
+
+
+def quote_attrib(inStr):
+    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
+    s1 = s1.replace('&', '&amp;')
+    s1 = s1.replace('<', '&lt;')
+    s1 = s1.replace('>', '&gt;')
+    if '"' in s1:
+        if "'" in s1:
+            s1 = '"%s"' % s1.replace('"', "&quot;")
+        else:
+            s1 = "'%s'" % s1
+    else:
+        s1 = '"%s"' % s1
+    return s1
+
+
+def quote_python(inStr):
+    s1 = inStr
+    if s1.find("'") == -1:
+        if s1.find('\n') == -1:
+            return "'%s'" % s1
+        else:
+            return "'''%s'''" % s1
+    else:
+        if s1.find('"') != -1:
+            s1 = s1.replace('"', '\\"')
+        if s1.find('\n') == -1:
+            return '"%s"' % s1
+        else:
+            return '"""%s"""' % s1
+
+
+def get_all_text_(node):
+    if node.text is not None:
+        text = node.text
+    else:
+        text = ''
+    for child in node:
+        if child.tail is not None:
+            text += child.tail
+    return text
+
+
+def find_attr_value_(attr_name, node):
+    attrs = node.attrib
+    attr_parts = attr_name.split(':')
+    value = None
+    if len(attr_parts) == 1:
+        value = attrs.get(attr_name)
+    elif len(attr_parts) == 2:
+        prefix, name = attr_parts
+        namespace = node.nsmap.get(prefix)
+        if namespace is not None:
+            value = attrs.get('{%s}%s' % (namespace, name, ))
+    return value
+
+
+class GDSParseError(Exception):
+    pass
+
+
+def raise_parse_error(node, msg):
+    msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, )
+    raise GDSParseError(msg)
+
+
+class MixedContainer:
+    # Constants for category:
+    CategoryNone = 0
+    CategoryText = 1
+    CategorySimple = 2
+    CategoryComplex = 3
+    # Constants for content_type:
+    TypeNone = 0
+    TypeText = 1
+    TypeString = 2
+    TypeInteger = 3
+    TypeFloat = 4
+    TypeDecimal = 5
+    TypeDouble = 6
+    TypeBoolean = 7
+    TypeBase64 = 8
+    def __init__(self, category, content_type, name, value):
+        self.category = category
+        self.content_type = content_type
+        self.name = name
+        self.value = value
+    def getCategory(self):
+        return self.category
+    def getContenttype(self, content_type):
+        return self.content_type
+    def getValue(self):
+        return self.value
+    def getName(self):
+        return self.name
+    def export(self, outfile, level, name, namespace,
+               pretty_print=True):
+        if self.category ==
MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def set_data_type(self, data_type): 
self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + +# +# Data representation classes. +# + + +class tpOpcaoSimples(object): + _0='0' + _1='1' + _2='2' + _3='3' + + +class tpStatusNFe(object): + N='N' + C='C' + E='E' + + +class tpTipoRPS(object): + RPS='RPS' + RPSM='RPS-M' + RPSC='RPS-C' + + +class tpEvento(GeneratedsSuper): + """Chave para identificação da origem do evento.""" + subclass = None + superclass = None + def __init__(self, Codigo=None, Descricao=None, ChaveRPS=None, ChaveNFe=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.Codigo = Codigo + self.validate_tpCodigoEvento(self.Codigo) + self.Descricao = Descricao + self.validate_tpDescricaoEvento(self.Descricao) + self.ChaveRPS = ChaveRPS + self.ChaveNFe = ChaveNFe + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, tpEvento) + if subclass is not None: + return subclass(*args_, **kwargs_) + if tpEvento.subclass: + return tpEvento.subclass(*args_, **kwargs_) + else: + return tpEvento(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_tpCodigoEvento(self, value): + # Validate type tpCodigoEvento, a restriction on xs:short. + if value is not None and Validate_simpletypes_: + if not self.gds_validate_simple_patterns( + self.validate_tpCodigoEvento_patterns_, value): + warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tpCodigoEvento_patterns_, )) + validate_tpCodigoEvento_patterns_ = [['^[0-9]{3,4}$']] + def validate_tpDescricaoEvento(self, value): + # Validate type tpDescricaoEvento, a restriction on xs:string. 
+ if value is not None and Validate_simpletypes_: + if len(value) > 300: + warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpDescricaoEvento' % {"value" : value.encode("utf-8")} ) + if len(value) < 0: + warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tpDescricaoEvento' % {"value" : value.encode("utf-8")} ) + def hasContent_(self): + if ( + self.Codigo is not None or + self.Descricao is not None or + self.ChaveRPS is not None or + self.ChaveNFe is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='tpEvento', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpEvento') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='tpEvento') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='tpEvento', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='tpEvento'): + pass + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='tpEvento', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Codigo is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sCodigo>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.Codigo, input_name='Codigo'), namespaceprefix_ , eol_)) + if self.Descricao is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sDescricao>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Descricao), input_name='Descricao')), namespaceprefix_ , eol_)) + if self.ChaveRPS is not None: + self.ChaveRPS.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ChaveRPS', pretty_print=pretty_print) + if self.ChaveNFe is not None: + self.ChaveNFe.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ChaveNFe', pretty_print=pretty_print) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + pass + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'Codigo' and child_.text: + sval_ = child_.text + try: + ival_ = int(sval_) + except (TypeError, ValueError) as exp: + raise_parse_error(child_, 'requires integer: %s' % exp) + ival_ = self.gds_validate_integer(ival_, node, 'Codigo') + self.Codigo = ival_ + # validate type tpCodigoEvento + self.validate_tpCodigoEvento(self.Codigo) + elif nodeName_ == 'Descricao': + Descricao_ = child_.text + Descricao_ = self.gds_validate_string(Descricao_, node, 'Descricao') + self.Descricao = Descricao_ + # validate type tpDescricaoEvento + 
self.validate_tpDescricaoEvento(self.Descricao) + elif nodeName_ == 'ChaveRPS': + obj_ = tpChaveRPS.factory(parent_object_=self) + obj_.build(child_) + self.ChaveRPS = obj_ + obj_.original_tagname_ = 'ChaveRPS' + elif nodeName_ == 'ChaveNFe': + obj_ = tpChaveNFe.factory(parent_object_=self) + obj_.build(child_) + self.ChaveNFe = obj_ + obj_.original_tagname_ = 'ChaveNFe' +# end class tpEvento + + +class tpCPFCNPJ(GeneratedsSuper): + """Tipo que representa um CPF/CNPJ.""" + subclass = None + superclass = None + def __init__(self, CPF=None, CNPJ=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.CPF = CPF + self.validate_tpCPF(self.CPF) + self.CNPJ = CNPJ + self.validate_tpCNPJ(self.CNPJ) + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, tpCPFCNPJ) + if subclass is not None: + return subclass(*args_, **kwargs_) + if tpCPFCNPJ.subclass: + return tpCPFCNPJ.subclass(*args_, **kwargs_) + else: + return tpCPFCNPJ(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_tpCPF(self, value): + # Validate type tpCPF, a restriction on xs:string. + if value is not None and Validate_simpletypes_: + if not self.gds_validate_simple_patterns( + self.validate_tpCPF_patterns_, value): + warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tpCPF_patterns_, )) + validate_tpCPF_patterns_ = [['^[0-9]{0}|[0-9]{11}$']] + def validate_tpCNPJ(self, value): + # Validate type tpCNPJ, a restriction on xs:string. + if value is not None and Validate_simpletypes_: + if not self.gds_validate_simple_patterns( + self.validate_tpCNPJ_patterns_, value): + warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tpCNPJ_patterns_, )) + validate_tpCNPJ_patterns_ = [['^[0-9]{14}$']] + def hasContent_(self): + if ( + self.CPF is not None or + self.CNPJ is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='tpCPFCNPJ', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpCPFCNPJ') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='tpCPFCNPJ') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='tpCPFCNPJ', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='tpCPFCNPJ'): + pass + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='tpCPFCNPJ', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.CPF is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sCPF>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CPF), input_name='CPF')), 
namespaceprefix_ , eol_)) + if self.CNPJ is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sCNPJ>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CNPJ), input_name='CNPJ')), namespaceprefix_ , eol_)) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + pass + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'CPF': + CPF_ = child_.text + CPF_ = self.gds_validate_string(CPF_, node, 'CPF') + self.CPF = CPF_ + # validate type tpCPF + self.validate_tpCPF(self.CPF) + elif nodeName_ == 'CNPJ': + CNPJ_ = child_.text + CNPJ_ = self.gds_validate_string(CNPJ_, node, 'CNPJ') + self.CNPJ = CNPJ_ + # validate type tpCNPJ + self.validate_tpCNPJ(self.CNPJ) +# end class tpCPFCNPJ + + +class tpChaveNFeRPS(GeneratedsSuper): + """Tipo que representa a chave de uma NFS-e e a Chave do RPS que a + mesma substitui.""" + subclass = None + superclass = None + def __init__(self, ChaveNFe=None, ChaveRPS=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.ChaveNFe = ChaveNFe + self.ChaveRPS = ChaveRPS + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, tpChaveNFeRPS) + if subclass is not None: + return subclass(*args_, **kwargs_) + if tpChaveNFeRPS.subclass: + return tpChaveNFeRPS.subclass(*args_, **kwargs_) + else: + return tpChaveNFeRPS(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): + if ( + self.ChaveNFe is not None or + self.ChaveRPS is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='tpChaveNFeRPS', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpChaveNFeRPS') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='tpChaveNFeRPS') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='tpChaveNFeRPS', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='tpChaveNFeRPS'): + pass + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='tpChaveNFeRPS', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.ChaveNFe is not None: + self.ChaveNFe.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ChaveNFe', pretty_print=pretty_print) + if self.ChaveRPS is not None: + self.ChaveRPS.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ChaveRPS', pretty_print=pretty_print) + def build(self, node): + 
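# ---------------------------------------------------------------------------
# Illustrative usage sketch (not part of the generated bindings): building a
# tpCPFCNPJ, as defined above, and serialising it with its export() method.
# The import path assumes the module layout added by this patch
# (nfselib/paulistana/v02/TiposNFe_v01.py); the CNPJ value is a placeholder.
import io
from nfselib.paulistana.v02.TiposNFe_v01 import tpCPFCNPJ

remetente = tpCPFCNPJ(CNPJ='12345678000195')
buffer_ = io.StringIO()
remetente.export(buffer_, level=0, name_='CPFCNPJRemetente', pretty_print=True)
# buffer_.getvalue() now holds the <CPFCNPJRemetente> fragment with its
# <CNPJ> child, ready to be embedded in a request envelope.
print(buffer_.getvalue())
# ---------------------------------------------------------------------------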
already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + pass + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'ChaveNFe': + obj_ = tpChaveNFe.factory(parent_object_=self) + obj_.build(child_) + self.ChaveNFe = obj_ + obj_.original_tagname_ = 'ChaveNFe' + elif nodeName_ == 'ChaveRPS': + obj_ = tpChaveRPS.factory(parent_object_=self) + obj_.build(child_) + self.ChaveRPS = obj_ + obj_.original_tagname_ = 'ChaveRPS' +# end class tpChaveNFeRPS + + +class tpChaveNFe(GeneratedsSuper): + """Chave de identificação da NFS-e.""" + subclass = None + superclass = None + def __init__(self, InscricaoPrestador=None, NumeroNFe=None, CodigoVerificacao=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.InscricaoPrestador = InscricaoPrestador + self.validate_tpInscricaoMunicipal(self.InscricaoPrestador) + self.NumeroNFe = NumeroNFe + self.validate_tpNumero(self.NumeroNFe) + self.CodigoVerificacao = CodigoVerificacao + self.validate_tpCodigoVerificacao(self.CodigoVerificacao) + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, tpChaveNFe) + if subclass is not None: + return subclass(*args_, **kwargs_) + if tpChaveNFe.subclass: + return tpChaveNFe.subclass(*args_, **kwargs_) + else: + return tpChaveNFe(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_tpInscricaoMunicipal(self, value): + # Validate type tpInscricaoMunicipal, a restriction on xs:long. + if value is not None and Validate_simpletypes_: + if not self.gds_validate_simple_patterns( + self.validate_tpInscricaoMunicipal_patterns_, value): + warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tpInscricaoMunicipal_patterns_, )) + validate_tpInscricaoMunicipal_patterns_ = [['^[0-9]{8,8}$']] + def validate_tpNumero(self, value): + # Validate type tpNumero, a restriction on xs:long. + if value is not None and Validate_simpletypes_: + if not self.gds_validate_simple_patterns( + self.validate_tpNumero_patterns_, value): + warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tpNumero_patterns_, )) + validate_tpNumero_patterns_ = [['^[0-9]{1,12}$']] + def validate_tpCodigoVerificacao(self, value): + # Validate type tpCodigoVerificacao, a restriction on xs:string. 
+ if value is not None and Validate_simpletypes_: + if len(value) > 8: + warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpCodigoVerificacao' % {"value" : value.encode("utf-8")} ) + if len(value) < 8: + warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tpCodigoVerificacao' % {"value" : value.encode("utf-8")} ) + def hasContent_(self): + if ( + self.InscricaoPrestador is not None or + self.NumeroNFe is not None or + self.CodigoVerificacao is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='tpChaveNFe', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpChaveNFe') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='tpChaveNFe') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='tpChaveNFe', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='tpChaveNFe'): + pass + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='tpChaveNFe', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.InscricaoPrestador is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sInscricaoPrestador>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.InscricaoPrestador, input_name='InscricaoPrestador'), namespaceprefix_ , eol_)) + if self.NumeroNFe is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sNumeroNFe>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.NumeroNFe, input_name='NumeroNFe'), namespaceprefix_ , eol_)) + if self.CodigoVerificacao is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sCodigoVerificacao>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CodigoVerificacao), input_name='CodigoVerificacao')), namespaceprefix_ , eol_)) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + pass + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'InscricaoPrestador' and child_.text: + sval_ = child_.text + try: + ival_ = int(sval_) + except (TypeError, ValueError) as exp: + raise_parse_error(child_, 'requires integer: %s' % exp) + ival_ = self.gds_validate_integer(ival_, node, 'InscricaoPrestador') + self.InscricaoPrestador = ival_ + # validate type tpInscricaoMunicipal + self.validate_tpInscricaoMunicipal(self.InscricaoPrestador) + elif nodeName_ == 'NumeroNFe' and child_.text: + sval_ = child_.text + try: + ival_ = int(sval_) + except (TypeError, ValueError) as exp: + 
raise_parse_error(child_, 'requires integer: %s' % exp) + ival_ = self.gds_validate_integer(ival_, node, 'NumeroNFe') + self.NumeroNFe = ival_ + # validate type tpNumero + self.validate_tpNumero(self.NumeroNFe) + elif nodeName_ == 'CodigoVerificacao': + CodigoVerificacao_ = child_.text + CodigoVerificacao_ = self.gds_validate_string(CodigoVerificacao_, node, 'CodigoVerificacao') + self.CodigoVerificacao = CodigoVerificacao_ + # validate type tpCodigoVerificacao + self.validate_tpCodigoVerificacao(self.CodigoVerificacao) +# end class tpChaveNFe + + +class tpChaveRPS(GeneratedsSuper): + """Tipo que define a chave identificadora de um RPS.""" + subclass = None + superclass = None + def __init__(self, InscricaoPrestador=None, SerieRPS=None, NumeroRPS=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.InscricaoPrestador = InscricaoPrestador + self.validate_tpInscricaoMunicipal(self.InscricaoPrestador) + self.SerieRPS = SerieRPS + self.validate_tpSerieRPS(self.SerieRPS) + self.NumeroRPS = NumeroRPS + self.validate_tpNumero(self.NumeroRPS) + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, tpChaveRPS) + if subclass is not None: + return subclass(*args_, **kwargs_) + if tpChaveRPS.subclass: + return tpChaveRPS.subclass(*args_, **kwargs_) + else: + return tpChaveRPS(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_tpInscricaoMunicipal(self, value): + # Validate type tpInscricaoMunicipal, a restriction on xs:long. + if value is not None and Validate_simpletypes_: + if not self.gds_validate_simple_patterns( + self.validate_tpInscricaoMunicipal_patterns_, value): + warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tpInscricaoMunicipal_patterns_, )) + validate_tpInscricaoMunicipal_patterns_ = [['^[0-9]{8,8}$']] + def validate_tpSerieRPS(self, value): + # Validate type tpSerieRPS, a restriction on xs:string. + if value is not None and Validate_simpletypes_: + if len(value) > 5: + warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpSerieRPS' % {"value" : value.encode("utf-8")} ) + if len(value) < 1: + warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tpSerieRPS' % {"value" : value.encode("utf-8")} ) + def validate_tpNumero(self, value): + # Validate type tpNumero, a restriction on xs:long. 
+ if value is not None and Validate_simpletypes_: + if not self.gds_validate_simple_patterns( + self.validate_tpNumero_patterns_, value): + warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tpNumero_patterns_, )) + validate_tpNumero_patterns_ = [['^[0-9]{1,12}$']] + def hasContent_(self): + if ( + self.InscricaoPrestador is not None or + self.SerieRPS is not None or + self.NumeroRPS is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='tpChaveRPS', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpChaveRPS') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='tpChaveRPS') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='tpChaveRPS', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='tpChaveRPS'): + pass + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='tpChaveRPS', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.InscricaoPrestador is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sInscricaoPrestador>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.InscricaoPrestador, input_name='InscricaoPrestador'), namespaceprefix_ , eol_)) + if self.SerieRPS is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sSerieRPS>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SerieRPS), input_name='SerieRPS')), namespaceprefix_ , eol_)) + if self.NumeroRPS is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sNumeroRPS>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.NumeroRPS, input_name='NumeroRPS'), namespaceprefix_ , eol_)) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + pass + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'InscricaoPrestador' and child_.text: + sval_ = child_.text + try: + ival_ = int(sval_) + except (TypeError, ValueError) as exp: + raise_parse_error(child_, 'requires integer: %s' % exp) + ival_ = self.gds_validate_integer(ival_, node, 'InscricaoPrestador') + self.InscricaoPrestador = ival_ + # validate type tpInscricaoMunicipal + self.validate_tpInscricaoMunicipal(self.InscricaoPrestador) + elif nodeName_ == 'SerieRPS': + SerieRPS_ = child_.text + SerieRPS_ = self.gds_validate_string(SerieRPS_, node, 'SerieRPS') + self.SerieRPS = SerieRPS_ + # validate type tpSerieRPS + self.validate_tpSerieRPS(self.SerieRPS) + elif 
nodeName_ == 'NumeroRPS' and child_.text: + sval_ = child_.text + try: + ival_ = int(sval_) + except (TypeError, ValueError) as exp: + raise_parse_error(child_, 'requires integer: %s' % exp) + ival_ = self.gds_validate_integer(ival_, node, 'NumeroRPS') + self.NumeroRPS = ival_ + # validate type tpNumero + self.validate_tpNumero(self.NumeroRPS) +# end class tpChaveRPS + + +class tpEndereco(GeneratedsSuper): + """Tipo Endereço.""" + subclass = None + superclass = None + def __init__(self, TipoLogradouro=None, Logradouro=None, NumeroEndereco=None, ComplementoEndereco=None, Bairro=None, Cidade=None, UF=None, CEP=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.TipoLogradouro = TipoLogradouro + self.validate_tpTipoLogradouro(self.TipoLogradouro) + self.Logradouro = Logradouro + self.validate_tpLogradouro(self.Logradouro) + self.NumeroEndereco = NumeroEndereco + self.validate_tpNumeroEndereco(self.NumeroEndereco) + self.ComplementoEndereco = ComplementoEndereco + self.validate_tpComplementoEndereco(self.ComplementoEndereco) + self.Bairro = Bairro + self.validate_tpBairro(self.Bairro) + self.Cidade = Cidade + self.validate_tpCidade(self.Cidade) + self.UF = UF + self.validate_tpUF(self.UF) + self.CEP = CEP + self.validate_tpCEP(self.CEP) + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, tpEndereco) + if subclass is not None: + return subclass(*args_, **kwargs_) + if tpEndereco.subclass: + return tpEndereco.subclass(*args_, **kwargs_) + else: + return tpEndereco(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_tpTipoLogradouro(self, value): + # Validate type tpTipoLogradouro, a restriction on xs:string. + if value is not None and Validate_simpletypes_: + if len(value) > 3: + warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpTipoLogradouro' % {"value" : value.encode("utf-8")} ) + if len(value) < 0: + warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tpTipoLogradouro' % {"value" : value.encode("utf-8")} ) + def validate_tpLogradouro(self, value): + # Validate type tpLogradouro, a restriction on xs:string. + if value is not None and Validate_simpletypes_: + if len(value) > 50: + warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpLogradouro' % {"value" : value.encode("utf-8")} ) + if len(value) < 0: + warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tpLogradouro' % {"value" : value.encode("utf-8")} ) + def validate_tpNumeroEndereco(self, value): + # Validate type tpNumeroEndereco, a restriction on xs:string. + if value is not None and Validate_simpletypes_: + if len(value) > 10: + warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpNumeroEndereco' % {"value" : value.encode("utf-8")} ) + if len(value) < 0: + warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tpNumeroEndereco' % {"value" : value.encode("utf-8")} ) + def validate_tpComplementoEndereco(self, value): + # Validate type tpComplementoEndereco, a restriction on xs:string. 
+ if value is not None and Validate_simpletypes_: + if len(value) > 30: + warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpComplementoEndereco' % {"value" : value.encode("utf-8")} ) + if len(value) < 0: + warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tpComplementoEndereco' % {"value" : value.encode("utf-8")} ) + def validate_tpBairro(self, value): + # Validate type tpBairro, a restriction on xs:string. + if value is not None and Validate_simpletypes_: + if len(value) > 30: + warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpBairro' % {"value" : value.encode("utf-8")} ) + if len(value) < 0: + warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tpBairro' % {"value" : value.encode("utf-8")} ) + def validate_tpCidade(self, value): + # Validate type tpCidade, a restriction on xs:int. + if value is not None and Validate_simpletypes_: + if not self.gds_validate_simple_patterns( + self.validate_tpCidade_patterns_, value): + warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tpCidade_patterns_, )) + validate_tpCidade_patterns_ = [['^[0-9]{7}$']] + def validate_tpUF(self, value): + # Validate type tpUF, a restriction on xs:string. + if value is not None and Validate_simpletypes_: + if len(value) > 2: + warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpUF' % {"value" : value.encode("utf-8")} ) + if len(value) < 2: + warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tpUF' % {"value" : value.encode("utf-8")} ) + def validate_tpCEP(self, value): + # Validate type tpCEP, a restriction on xs:int. + if value is not None and Validate_simpletypes_: + if not self.gds_validate_simple_patterns( + self.validate_tpCEP_patterns_, value): + warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tpCEP_patterns_, )) + validate_tpCEP_patterns_ = [['^[0-9]{7,8}$']] + def hasContent_(self): + if ( + self.TipoLogradouro is not None or + self.Logradouro is not None or + self.NumeroEndereco is not None or + self.ComplementoEndereco is not None or + self.Bairro is not None or + self.Cidade is not None or + self.UF is not None or + self.CEP is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='tpEndereco', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpEndereco') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='tpEndereco') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='tpEndereco', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='tpEndereco'): + pass + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', 
name_='tpEndereco', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.TipoLogradouro is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sTipoLogradouro>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.TipoLogradouro), input_name='TipoLogradouro')), namespaceprefix_ , eol_)) + if self.Logradouro is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sLogradouro>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Logradouro), input_name='Logradouro')), namespaceprefix_ , eol_)) + if self.NumeroEndereco is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sNumeroEndereco>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.NumeroEndereco), input_name='NumeroEndereco')), namespaceprefix_ , eol_)) + if self.ComplementoEndereco is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sComplementoEndereco>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ComplementoEndereco), input_name='ComplementoEndereco')), namespaceprefix_ , eol_)) + if self.Bairro is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sBairro>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Bairro), input_name='Bairro')), namespaceprefix_ , eol_)) + if self.Cidade is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sCidade>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.Cidade, input_name='Cidade'), namespaceprefix_ , eol_)) + if self.UF is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sUF>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.UF), input_name='UF')), namespaceprefix_ , eol_)) + if self.CEP is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sCEP>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.CEP, input_name='CEP'), namespaceprefix_ , eol_)) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + pass + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'TipoLogradouro': + TipoLogradouro_ = child_.text + TipoLogradouro_ = self.gds_validate_string(TipoLogradouro_, node, 'TipoLogradouro') + self.TipoLogradouro = TipoLogradouro_ + # validate type tpTipoLogradouro + self.validate_tpTipoLogradouro(self.TipoLogradouro) + elif nodeName_ == 'Logradouro': + Logradouro_ = child_.text + Logradouro_ = self.gds_validate_string(Logradouro_, node, 'Logradouro') + self.Logradouro = Logradouro_ + # validate type tpLogradouro + self.validate_tpLogradouro(self.Logradouro) + elif nodeName_ == 'NumeroEndereco': + NumeroEndereco_ = child_.text + NumeroEndereco_ = self.gds_validate_string(NumeroEndereco_, node, 'NumeroEndereco') + self.NumeroEndereco = NumeroEndereco_ + # validate type tpNumeroEndereco + self.validate_tpNumeroEndereco(self.NumeroEndereco) + elif nodeName_ == 'ComplementoEndereco': + ComplementoEndereco_ = child_.text + ComplementoEndereco_ = self.gds_validate_string(ComplementoEndereco_, node, 'ComplementoEndereco') + self.ComplementoEndereco = ComplementoEndereco_ + # validate type 
tpComplementoEndereco + self.validate_tpComplementoEndereco(self.ComplementoEndereco) + elif nodeName_ == 'Bairro': + Bairro_ = child_.text + Bairro_ = self.gds_validate_string(Bairro_, node, 'Bairro') + self.Bairro = Bairro_ + # validate type tpBairro + self.validate_tpBairro(self.Bairro) + elif nodeName_ == 'Cidade' and child_.text: + sval_ = child_.text + try: + ival_ = int(sval_) + except (TypeError, ValueError) as exp: + raise_parse_error(child_, 'requires integer: %s' % exp) + ival_ = self.gds_validate_integer(ival_, node, 'Cidade') + self.Cidade = ival_ + # validate type tpCidade + self.validate_tpCidade(self.Cidade) + elif nodeName_ == 'UF': + UF_ = child_.text + UF_ = self.gds_validate_string(UF_, node, 'UF') + self.UF = UF_ + # validate type tpUF + self.validate_tpUF(self.UF) + elif nodeName_ == 'CEP' and child_.text: + sval_ = child_.text + try: + ival_ = int(sval_) + except (TypeError, ValueError) as exp: + raise_parse_error(child_, 'requires integer: %s' % exp) + ival_ = self.gds_validate_integer(ival_, node, 'CEP') + self.CEP = ival_ + # validate type tpCEP + self.validate_tpCEP(self.CEP) +# end class tpEndereco + + +class tpInformacoesLote(GeneratedsSuper): + """Informações do lote processado.""" + subclass = None + superclass = None + def __init__(self, NumeroLote=None, InscricaoPrestador=None, CPFCNPJRemetente=None, DataEnvioLote=None, QtdNotasProcessadas=None, TempoProcessamento=None, ValorTotalServicos=None, ValorTotalDeducoes=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.NumeroLote = NumeroLote + self.validate_tpNumero(self.NumeroLote) + self.InscricaoPrestador = InscricaoPrestador + self.validate_tpInscricaoMunicipal(self.InscricaoPrestador) + self.CPFCNPJRemetente = CPFCNPJRemetente + if isinstance(DataEnvioLote, BaseStrType_): + initvalue_ = datetime_.datetime.strptime(DataEnvioLote, '%Y-%m-%dT%H:%M:%S') + else: + initvalue_ = DataEnvioLote + self.DataEnvioLote = initvalue_ + self.QtdNotasProcessadas = QtdNotasProcessadas + self.validate_tpQuantidade(self.QtdNotasProcessadas) + self.TempoProcessamento = TempoProcessamento + self.validate_tpTempoProcessamento(self.TempoProcessamento) + self.ValorTotalServicos = ValorTotalServicos + self.validate_tpValor(self.ValorTotalServicos) + self.ValorTotalDeducoes = ValorTotalDeducoes + self.validate_tpValor(self.ValorTotalDeducoes) + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, tpInformacoesLote) + if subclass is not None: + return subclass(*args_, **kwargs_) + if tpInformacoesLote.subclass: + return tpInformacoesLote.subclass(*args_, **kwargs_) + else: + return tpInformacoesLote(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_tpNumero(self, value): + # Validate type tpNumero, a restriction on xs:long. + if value is not None and Validate_simpletypes_: + if not self.gds_validate_simple_patterns( + self.validate_tpNumero_patterns_, value): + warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tpNumero_patterns_, )) + validate_tpNumero_patterns_ = [['^[0-9]{1,12}$']] + def validate_tpInscricaoMunicipal(self, value): + # Validate type tpInscricaoMunicipal, a restriction on xs:long. 
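# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the generated bindings): the
# tpInformacoesLote constructor above accepts DataEnvioLote either as an
# ISO-8601 string in the '%Y-%m-%dT%H:%M:%S' form or as a ready-made
# datetime; both end up stored as a datetime on the instance. Import path
# and values are placeholders assumed for illustration.
import datetime
from nfselib.paulistana.v02.TiposNFe_v01 import tpInformacoesLote

lote_a = tpInformacoesLote(DataEnvioLote='2020-07-22T07:11:05')
lote_b = tpInformacoesLote(DataEnvioLote=datetime.datetime(2020, 7, 22, 7, 11, 5))
assert lote_a.DataEnvioLote == lote_b.DataEnvioLote
# ---------------------------------------------------------------------------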
+ if value is not None and Validate_simpletypes_: + if not self.gds_validate_simple_patterns( + self.validate_tpInscricaoMunicipal_patterns_, value): + warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tpInscricaoMunicipal_patterns_, )) + validate_tpInscricaoMunicipal_patterns_ = [['^[0-9]{8,8}$']] + def validate_tpQuantidade(self, value): + # Validate type tpQuantidade, a restriction on xs:long. + if value is not None and Validate_simpletypes_: + if not self.gds_validate_simple_patterns( + self.validate_tpQuantidade_patterns_, value): + warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tpQuantidade_patterns_, )) + validate_tpQuantidade_patterns_ = [['^[0-9]{1,15}$']] + def validate_tpTempoProcessamento(self, value): + # Validate type tpTempoProcessamento, a restriction on xs:long. + if value is not None and Validate_simpletypes_: + if not self.gds_validate_simple_patterns( + self.validate_tpTempoProcessamento_patterns_, value): + warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tpTempoProcessamento_patterns_, )) + validate_tpTempoProcessamento_patterns_ = [['^[0-9]{1,15}$']] + def validate_tpValor(self, value): + # Validate type tpValor, a restriction on xs:decimal. + if value is not None and Validate_simpletypes_: + if value < 0: + warnings_.warn('Value "%(value)s" does not match xsd minInclusive restriction on tpValor' % {"value" : value} ) + if len(str(value)) >= 15: + warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on tpValor' % {"value" : value} ) + if not self.gds_validate_simple_patterns( + self.validate_tpValor_patterns_, value): + warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tpValor_patterns_, )) + validate_tpValor_patterns_ = [['^0|0\\.[0-9]{2}|[1-9]{1}[0-9]{0,12}(\\.[0-9]{0,2})?$']] + def hasContent_(self): + if ( + self.NumeroLote is not None or + self.InscricaoPrestador is not None or + self.CPFCNPJRemetente is not None or + self.DataEnvioLote is not None or + self.QtdNotasProcessadas is not None or + self.TempoProcessamento is not None or + self.ValorTotalServicos is not None or + self.ValorTotalDeducoes is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='tpInformacoesLote', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpInformacoesLote') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='tpInformacoesLote') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='tpInformacoesLote', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='tpInformacoesLote'): + pass + def exportChildren(self, outfile, level, 
namespaceprefix_='', namespacedef_='', name_='tpInformacoesLote', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.NumeroLote is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sNumeroLote>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.NumeroLote, input_name='NumeroLote'), namespaceprefix_ , eol_)) + if self.InscricaoPrestador is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sInscricaoPrestador>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.InscricaoPrestador, input_name='InscricaoPrestador'), namespaceprefix_ , eol_)) + if self.CPFCNPJRemetente is not None: + self.CPFCNPJRemetente.export(outfile, level, namespaceprefix_, namespacedef_='', name_='CPFCNPJRemetente', pretty_print=pretty_print) + if self.DataEnvioLote is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sDataEnvioLote>%s%s' % (namespaceprefix_ , self.gds_format_datetime(self.DataEnvioLote, input_name='DataEnvioLote'), namespaceprefix_ , eol_)) + if self.QtdNotasProcessadas is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sQtdNotasProcessadas>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.QtdNotasProcessadas, input_name='QtdNotasProcessadas'), namespaceprefix_ , eol_)) + if self.TempoProcessamento is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sTempoProcessamento>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.TempoProcessamento, input_name='TempoProcessamento'), namespaceprefix_ , eol_)) + if self.ValorTotalServicos is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sValorTotalServicos>%s%s' % (namespaceprefix_ , self.gds_format_float(self.ValorTotalServicos, input_name='ValorTotalServicos'), namespaceprefix_ , eol_)) + if self.ValorTotalDeducoes is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sValorTotalDeducoes>%s%s' % (namespaceprefix_ , self.gds_format_float(self.ValorTotalDeducoes, input_name='ValorTotalDeducoes'), namespaceprefix_ , eol_)) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + pass + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'NumeroLote' and child_.text: + sval_ = child_.text + try: + ival_ = int(sval_) + except (TypeError, ValueError) as exp: + raise_parse_error(child_, 'requires integer: %s' % exp) + ival_ = self.gds_validate_integer(ival_, node, 'NumeroLote') + self.NumeroLote = ival_ + # validate type tpNumero + self.validate_tpNumero(self.NumeroLote) + elif nodeName_ == 'InscricaoPrestador' and child_.text: + sval_ = child_.text + try: + ival_ = int(sval_) + except (TypeError, ValueError) as exp: + raise_parse_error(child_, 'requires integer: %s' % exp) + ival_ = self.gds_validate_integer(ival_, node, 'InscricaoPrestador') + self.InscricaoPrestador = ival_ + # validate type tpInscricaoMunicipal + self.validate_tpInscricaoMunicipal(self.InscricaoPrestador) + elif nodeName_ == 'CPFCNPJRemetente': + obj_ = tpCPFCNPJ.factory(parent_object_=self) + obj_.build(child_) + self.CPFCNPJRemetente = obj_ + obj_.original_tagname_ = 'CPFCNPJRemetente' + elif nodeName_ == 'DataEnvioLote': + sval_ = child_.text + dval_ = 
self.gds_parse_datetime(sval_) + self.DataEnvioLote = dval_ + elif nodeName_ == 'QtdNotasProcessadas' and child_.text: + sval_ = child_.text + try: + ival_ = int(sval_) + except (TypeError, ValueError) as exp: + raise_parse_error(child_, 'requires integer: %s' % exp) + ival_ = self.gds_validate_integer(ival_, node, 'QtdNotasProcessadas') + self.QtdNotasProcessadas = ival_ + # validate type tpQuantidade + self.validate_tpQuantidade(self.QtdNotasProcessadas) + elif nodeName_ == 'TempoProcessamento' and child_.text: + sval_ = child_.text + try: + ival_ = int(sval_) + except (TypeError, ValueError) as exp: + raise_parse_error(child_, 'requires integer: %s' % exp) + ival_ = self.gds_validate_integer(ival_, node, 'TempoProcessamento') + self.TempoProcessamento = ival_ + # validate type tpTempoProcessamento + self.validate_tpTempoProcessamento(self.TempoProcessamento) + elif nodeName_ == 'ValorTotalServicos' and child_.text: + sval_ = child_.text + try: + fval_ = float(sval_) + except (TypeError, ValueError) as exp: + raise_parse_error(child_, 'requires float or double: %s' % exp) + fval_ = self.gds_validate_float(fval_, node, 'ValorTotalServicos') + self.ValorTotalServicos = fval_ + # validate type tpValor + self.validate_tpValor(self.ValorTotalServicos) + elif nodeName_ == 'ValorTotalDeducoes' and child_.text: + sval_ = child_.text + try: + fval_ = float(sval_) + except (TypeError, ValueError) as exp: + raise_parse_error(child_, 'requires float or double: %s' % exp) + fval_ = self.gds_validate_float(fval_, node, 'ValorTotalDeducoes') + self.ValorTotalDeducoes = fval_ + # validate type tpValor + self.validate_tpValor(self.ValorTotalDeducoes) +# end class tpInformacoesLote + + +class tpNFe(GeneratedsSuper): + """Tipo que representa uma NFS-e""" + subclass = None + superclass = None + def __init__(self, Assinatura=None, ChaveNFe=None, DataEmissaoNFe=None, NumeroLote=None, ChaveRPS=None, TipoRPS=None, DataEmissaoRPS=None, CPFCNPJPrestador=None, RazaoSocialPrestador=None, EnderecoPrestador=None, EmailPrestador=None, StatusNFe=None, DataCancelamento=None, TributacaoNFe=None, OpcaoSimples=None, NumeroGuia=None, DataQuitacaoGuia=None, ValorServicos=None, ValorDeducoes=None, ValorPIS=None, ValorCOFINS=None, ValorINSS=None, ValorIR=None, ValorCSLL=None, CodigoServico=None, AliquotaServicos=None, ValorISS=None, ValorCredito=None, ISSRetido=None, CPFCNPJTomador=None, InscricaoMunicipalTomador=None, InscricaoEstadualTomador=None, RazaoSocialTomador=None, EnderecoTomador=None, EmailTomador=None, CPFCNPJIntermediario=None, InscricaoMunicipalIntermediario=None, ISSRetidoIntermediario=None, EmailIntermediario=None, Discriminacao=None, ValorCargaTributaria=None, PercentualCargaTributaria=None, FonteCargaTributaria=None, CodigoCEI=None, MatriculaObra=None, MunicipioPrestacao=None, NumeroEncapsulamento=None, ValorTotalRecebido=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.Assinatura = Assinatura + self.validate_tpAssinatura(self.Assinatura) + self.ChaveNFe = ChaveNFe + if isinstance(DataEmissaoNFe, BaseStrType_): + initvalue_ = datetime_.datetime.strptime(DataEmissaoNFe, '%Y-%m-%dT%H:%M:%S') + else: + initvalue_ = DataEmissaoNFe + self.DataEmissaoNFe = initvalue_ + self.NumeroLote = NumeroLote + self.validate_tpNumero(self.NumeroLote) + self.ChaveRPS = ChaveRPS + self.TipoRPS = TipoRPS + self.validate_tpTipoRPS(self.TipoRPS) + if isinstance(DataEmissaoRPS, BaseStrType_): + initvalue_ = datetime_.datetime.strptime(DataEmissaoRPS, '%Y-%m-%d').date() + 
else: + initvalue_ = DataEmissaoRPS + self.DataEmissaoRPS = initvalue_ + self.CPFCNPJPrestador = CPFCNPJPrestador + self.RazaoSocialPrestador = RazaoSocialPrestador + self.validate_tpRazaoSocial(self.RazaoSocialPrestador) + self.EnderecoPrestador = EnderecoPrestador + self.EmailPrestador = EmailPrestador + self.validate_tpEmail(self.EmailPrestador) + self.StatusNFe = StatusNFe + self.validate_tpStatusNFe(self.StatusNFe) + if isinstance(DataCancelamento, BaseStrType_): + initvalue_ = datetime_.datetime.strptime(DataCancelamento, '%Y-%m-%dT%H:%M:%S') + else: + initvalue_ = DataCancelamento + self.DataCancelamento = initvalue_ + self.TributacaoNFe = TributacaoNFe + self.validate_tpTributacaoNFe(self.TributacaoNFe) + self.OpcaoSimples = OpcaoSimples + self.validate_tpOpcaoSimples(self.OpcaoSimples) + self.NumeroGuia = NumeroGuia + self.validate_tpNumero(self.NumeroGuia) + if isinstance(DataQuitacaoGuia, BaseStrType_): + initvalue_ = datetime_.datetime.strptime(DataQuitacaoGuia, '%Y-%m-%d').date() + else: + initvalue_ = DataQuitacaoGuia + self.DataQuitacaoGuia = initvalue_ + self.ValorServicos = ValorServicos + self.validate_tpValor(self.ValorServicos) + self.ValorDeducoes = ValorDeducoes + self.validate_tpValor(self.ValorDeducoes) + self.ValorPIS = ValorPIS + self.validate_tpValor(self.ValorPIS) + self.ValorCOFINS = ValorCOFINS + self.validate_tpValor(self.ValorCOFINS) + self.ValorINSS = ValorINSS + self.validate_tpValor(self.ValorINSS) + self.ValorIR = ValorIR + self.validate_tpValor(self.ValorIR) + self.ValorCSLL = ValorCSLL + self.validate_tpValor(self.ValorCSLL) + self.CodigoServico = CodigoServico + self.validate_tpCodigoServico(self.CodigoServico) + self.AliquotaServicos = AliquotaServicos + self.validate_tpAliquota(self.AliquotaServicos) + self.ValorISS = ValorISS + self.validate_tpValor(self.ValorISS) + self.ValorCredito = ValorCredito + self.validate_tpValor(self.ValorCredito) + self.ISSRetido = ISSRetido + self.CPFCNPJTomador = CPFCNPJTomador + self.InscricaoMunicipalTomador = InscricaoMunicipalTomador + self.validate_tpInscricaoMunicipal(self.InscricaoMunicipalTomador) + self.InscricaoEstadualTomador = InscricaoEstadualTomador + self.validate_tpInscricaoEstadual(self.InscricaoEstadualTomador) + self.RazaoSocialTomador = RazaoSocialTomador + self.validate_tpRazaoSocial(self.RazaoSocialTomador) + self.EnderecoTomador = EnderecoTomador + self.EmailTomador = EmailTomador + self.validate_tpEmail(self.EmailTomador) + self.CPFCNPJIntermediario = CPFCNPJIntermediario + self.InscricaoMunicipalIntermediario = InscricaoMunicipalIntermediario + self.validate_tpInscricaoMunicipal(self.InscricaoMunicipalIntermediario) + self.ISSRetidoIntermediario = ISSRetidoIntermediario + self.EmailIntermediario = EmailIntermediario + self.validate_tpEmail(self.EmailIntermediario) + self.Discriminacao = Discriminacao + self.validate_tpDiscriminacao(self.Discriminacao) + self.ValorCargaTributaria = ValorCargaTributaria + self.validate_tpValor(self.ValorCargaTributaria) + self.PercentualCargaTributaria = PercentualCargaTributaria + self.validate_tpPercentualCargaTributaria(self.PercentualCargaTributaria) + self.FonteCargaTributaria = FonteCargaTributaria + self.validate_tpFonteCargaTributaria(self.FonteCargaTributaria) + self.CodigoCEI = CodigoCEI + self.validate_tpNumero(self.CodigoCEI) + self.MatriculaObra = MatriculaObra + self.validate_tpNumero(self.MatriculaObra) + self.MunicipioPrestacao = MunicipioPrestacao + self.validate_tpCidade(self.MunicipioPrestacao) + self.NumeroEncapsulamento = NumeroEncapsulamento 
+ self.validate_tpNumero(self.NumeroEncapsulamento) + self.ValorTotalRecebido = ValorTotalRecebido + self.validate_tpValor(self.ValorTotalRecebido) + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, tpNFe) + if subclass is not None: + return subclass(*args_, **kwargs_) + if tpNFe.subclass: + return tpNFe.subclass(*args_, **kwargs_) + else: + return tpNFe(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_tpAssinatura(self, value): + # Validate type tpAssinatura, a restriction on xs:base64Binary. + if value is not None and Validate_simpletypes_: + pass + def validate_tpNumero(self, value): + # Validate type tpNumero, a restriction on xs:long. + if value is not None and Validate_simpletypes_: + if not self.gds_validate_simple_patterns( + self.validate_tpNumero_patterns_, value): + warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tpNumero_patterns_, )) + validate_tpNumero_patterns_ = [['^[0-9]{1,12}$']] + def validate_tpTipoRPS(self, value): + # Validate type tpTipoRPS, a restriction on xs:string. + if value is not None and Validate_simpletypes_: + value = str(value) + enumerations = ['RPS', 'RPS-M', 'RPS-C'] + enumeration_respectee = False + for enum in enumerations: + if value == enum: + enumeration_respectee = True + break + if not enumeration_respectee: + warnings_.warn('Value "%(value)s" does not match xsd enumeration restriction on tpTipoRPS' % {"value" : value.encode("utf-8")} ) + def validate_tpRazaoSocial(self, value): + # Validate type tpRazaoSocial, a restriction on xs:string. + if value is not None and Validate_simpletypes_: + if len(value) > 75: + warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpRazaoSocial' % {"value" : value.encode("utf-8")} ) + if len(value) < 0: + warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tpRazaoSocial' % {"value" : value.encode("utf-8")} ) + def validate_tpEmail(self, value): + # Validate type tpEmail, a restriction on xs:string. + if value is not None and Validate_simpletypes_: + if len(value) > 75: + warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpEmail' % {"value" : value.encode("utf-8")} ) + if len(value) < 0: + warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tpEmail' % {"value" : value.encode("utf-8")} ) + def validate_tpStatusNFe(self, value): + # Validate type tpStatusNFe, a restriction on xs:string. + if value is not None and Validate_simpletypes_: + value = str(value) + enumerations = ['N', 'C', 'E'] + enumeration_respectee = False + for enum in enumerations: + if value == enum: + enumeration_respectee = True + break + if not enumeration_respectee: + warnings_.warn('Value "%(value)s" does not match xsd enumeration restriction on tpStatusNFe' % {"value" : value.encode("utf-8")} ) + def validate_tpTributacaoNFe(self, value): + # Validate type tpTributacaoNFe, a restriction on xs:string. + if value is not None and Validate_simpletypes_: + if len(value) > 1: + warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpTributacaoNFe' % {"value" : value.encode("utf-8")} ) + if len(value) < 1: + warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tpTributacaoNFe' % {"value" : value.encode("utf-8")} ) + def validate_tpOpcaoSimples(self, value): + # Validate type tpOpcaoSimples, a restriction on xs:string. 
+ if value is not None and Validate_simpletypes_: + value = str(value) + enumerations = ['0', '1', '2', '3'] + enumeration_respectee = False + for enum in enumerations: + if value == enum: + enumeration_respectee = True + break + if not enumeration_respectee: + warnings_.warn('Value "%(value)s" does not match xsd enumeration restriction on tpOpcaoSimples' % {"value" : value.encode("utf-8")} ) + def validate_tpValor(self, value): + # Validate type tpValor, a restriction on xs:decimal. + if value is not None and Validate_simpletypes_: + if value < 0: + warnings_.warn('Value "%(value)s" does not match xsd minInclusive restriction on tpValor' % {"value" : value} ) + if len(str(value)) >= 15: + warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on tpValor' % {"value" : value} ) + if not self.gds_validate_simple_patterns( + self.validate_tpValor_patterns_, value): + warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tpValor_patterns_, )) + validate_tpValor_patterns_ = [['^0|0\\.[0-9]{2}|[1-9]{1}[0-9]{0,12}(\\.[0-9]{0,2})?$']] + def validate_tpCodigoServico(self, value): + # Validate type tpCodigoServico, a restriction on xs:int. + if value is not None and Validate_simpletypes_: + if not self.gds_validate_simple_patterns( + self.validate_tpCodigoServico_patterns_, value): + warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tpCodigoServico_patterns_, )) + validate_tpCodigoServico_patterns_ = [['^[0-9]{4,5}$']] + def validate_tpAliquota(self, value): + # Validate type tpAliquota, a restriction on xs:decimal. + if value is not None and Validate_simpletypes_: + if value < 0: + warnings_.warn('Value "%(value)s" does not match xsd minInclusive restriction on tpAliquota' % {"value" : value} ) + if len(str(value)) >= 5: + warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on tpAliquota' % {"value" : value} ) + def validate_tpInscricaoMunicipal(self, value): + # Validate type tpInscricaoMunicipal, a restriction on xs:long. + if value is not None and Validate_simpletypes_: + if not self.gds_validate_simple_patterns( + self.validate_tpInscricaoMunicipal_patterns_, value): + warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tpInscricaoMunicipal_patterns_, )) + validate_tpInscricaoMunicipal_patterns_ = [['^[0-9]{8,8}$']] + def validate_tpInscricaoEstadual(self, value): + # Validate type tpInscricaoEstadual, a restriction on xs:long. + if value is not None and Validate_simpletypes_: + if not self.gds_validate_simple_patterns( + self.validate_tpInscricaoEstadual_patterns_, value): + warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tpInscricaoEstadual_patterns_, )) + validate_tpInscricaoEstadual_patterns_ = [['^[0-9]{1,19}$']] + def validate_tpDiscriminacao(self, value): + # Validate type tpDiscriminacao, a restriction on xs:string. 
+ if value is not None and Validate_simpletypes_: + if len(value) > 2000: + warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpDiscriminacao' % {"value" : value.encode("utf-8")} ) + if len(value) < 1: + warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tpDiscriminacao' % {"value" : value.encode("utf-8")} ) + def validate_tpPercentualCargaTributaria(self, value): + # Validate type tpPercentualCargaTributaria, a restriction on xs:decimal. + if value is not None and Validate_simpletypes_: + if value < 0: + warnings_.warn('Value "%(value)s" does not match xsd minInclusive restriction on tpPercentualCargaTributaria' % {"value" : value} ) + if len(str(value)) >= 7: + warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on tpPercentualCargaTributaria' % {"value" : value} ) + def validate_tpFonteCargaTributaria(self, value): + # Validate type tpFonteCargaTributaria, a restriction on xs:string. + if value is not None and Validate_simpletypes_: + if len(value) > 10: + warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpFonteCargaTributaria' % {"value" : value.encode("utf-8")} ) + if len(value) < 0: + warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tpFonteCargaTributaria' % {"value" : value.encode("utf-8")} ) + def validate_tpCidade(self, value): + # Validate type tpCidade, a restriction on xs:int. + if value is not None and Validate_simpletypes_: + if not self.gds_validate_simple_patterns( + self.validate_tpCidade_patterns_, value): + warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tpCidade_patterns_, )) + validate_tpCidade_patterns_ = [['^[0-9]{7}$']] + def hasContent_(self): + if ( + self.Assinatura is not None or + self.ChaveNFe is not None or + self.DataEmissaoNFe is not None or + self.NumeroLote is not None or + self.ChaveRPS is not None or + self.TipoRPS is not None or + self.DataEmissaoRPS is not None or + self.CPFCNPJPrestador is not None or + self.RazaoSocialPrestador is not None or + self.EnderecoPrestador is not None or + self.EmailPrestador is not None or + self.StatusNFe is not None or + self.DataCancelamento is not None or + self.TributacaoNFe is not None or + self.OpcaoSimples is not None or + self.NumeroGuia is not None or + self.DataQuitacaoGuia is not None or + self.ValorServicos is not None or + self.ValorDeducoes is not None or + self.ValorPIS is not None or + self.ValorCOFINS is not None or + self.ValorINSS is not None or + self.ValorIR is not None or + self.ValorCSLL is not None or + self.CodigoServico is not None or + self.AliquotaServicos is not None or + self.ValorISS is not None or + self.ValorCredito is not None or + self.ISSRetido is not None or + self.CPFCNPJTomador is not None or + self.InscricaoMunicipalTomador is not None or + self.InscricaoEstadualTomador is not None or + self.RazaoSocialTomador is not None or + self.EnderecoTomador is not None or + self.EmailTomador is not None or + self.CPFCNPJIntermediario is not None or + self.InscricaoMunicipalIntermediario is not None or + self.ISSRetidoIntermediario is not None or + self.EmailIntermediario is not None or + self.Discriminacao is not None or + self.ValorCargaTributaria is not None or + self.PercentualCargaTributaria is not None or + self.FonteCargaTributaria is not None or + self.CodigoCEI is not None or + self.MatriculaObra is not None or + self.MunicipioPrestacao is not None or + self.NumeroEncapsulamento is 
not None or
+            self.ValorTotalRecebido is not None
+        ):
+            return True
+        else:
+            return False
+    def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='tpNFe', pretty_print=True):
+        imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpNFe')
+        if imported_ns_def_ is not None:
+            namespacedef_ = imported_ns_def_
+        if pretty_print:
+            eol_ = '\n'
+        else:
+            eol_ = ''
+        if self.original_tagname_ is not None:
+            name_ = self.original_tagname_
+        showIndent(outfile, level, pretty_print)
+        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
+        already_processed = set()
+        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='tpNFe')
+        if self.hasContent_():
+            outfile.write('>%s' % (eol_, ))
+            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='tpNFe', pretty_print=pretty_print)
+            showIndent(outfile, level, pretty_print)
+            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
+        else:
+            outfile.write('/>%s' % (eol_, ))
+    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='tpNFe'):
+        pass
+    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='tpNFe', fromsubclass_=False, pretty_print=True):
+        if pretty_print:
+            eol_ = '\n'
+        else:
+            eol_ = ''
+        if self.Assinatura is not None:
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sAssinatura>%s</%sAssinatura>%s' % (namespaceprefix_ , self.gds_format_base64(self.Assinatura, input_name='Assinatura'), namespaceprefix_ , eol_))
+        if self.ChaveNFe is not None:
+            self.ChaveNFe.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ChaveNFe', pretty_print=pretty_print)
+        if self.DataEmissaoNFe is not None:
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sDataEmissaoNFe>%s</%sDataEmissaoNFe>%s' % (namespaceprefix_ , self.gds_format_datetime(self.DataEmissaoNFe, input_name='DataEmissaoNFe'), namespaceprefix_ , eol_))
+        if self.NumeroLote is not None:
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sNumeroLote>%s</%sNumeroLote>%s' % (namespaceprefix_ , self.gds_format_integer(self.NumeroLote, input_name='NumeroLote'), namespaceprefix_ , eol_))
+        if self.ChaveRPS is not None:
+            self.ChaveRPS.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ChaveRPS', pretty_print=pretty_print)
+        if self.TipoRPS is not None:
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sTipoRPS>%s</%sTipoRPS>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.TipoRPS), input_name='TipoRPS')), namespaceprefix_ , eol_))
+        if self.DataEmissaoRPS is not None:
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sDataEmissaoRPS>%s</%sDataEmissaoRPS>%s' % (namespaceprefix_ , self.gds_format_date(self.DataEmissaoRPS, input_name='DataEmissaoRPS'), namespaceprefix_ , eol_))
+        if self.CPFCNPJPrestador is not None:
+            self.CPFCNPJPrestador.export(outfile, level, namespaceprefix_, namespacedef_='', name_='CPFCNPJPrestador', pretty_print=pretty_print)
+        if self.RazaoSocialPrestador is not None:
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sRazaoSocialPrestador>%s</%sRazaoSocialPrestador>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.RazaoSocialPrestador), input_name='RazaoSocialPrestador')), namespaceprefix_ , eol_))
+        if self.EnderecoPrestador is not None:
+            self.EnderecoPrestador.export(outfile, level, namespaceprefix_, namespacedef_='', name_='EnderecoPrestador', pretty_print=pretty_print)
+        if self.EmailPrestador is not None:
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sEmailPrestador>%s</%sEmailPrestador>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.EmailPrestador), input_name='EmailPrestador')), namespaceprefix_ , eol_))
+        if self.StatusNFe is not None:
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sStatusNFe>%s</%sStatusNFe>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.StatusNFe), input_name='StatusNFe')), namespaceprefix_ , eol_))
+        if self.DataCancelamento is not None:
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sDataCancelamento>%s</%sDataCancelamento>%s' % (namespaceprefix_ , self.gds_format_datetime(self.DataCancelamento, input_name='DataCancelamento'), namespaceprefix_ , eol_))
+        if self.TributacaoNFe is not None:
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sTributacaoNFe>%s</%sTributacaoNFe>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.TributacaoNFe), input_name='TributacaoNFe')), namespaceprefix_ , eol_))
+        if self.OpcaoSimples is not None:
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sOpcaoSimples>%s</%sOpcaoSimples>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.OpcaoSimples), input_name='OpcaoSimples')), namespaceprefix_ , eol_))
+        if self.NumeroGuia is not None:
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sNumeroGuia>%s</%sNumeroGuia>%s' % (namespaceprefix_ , self.gds_format_integer(self.NumeroGuia, input_name='NumeroGuia'), namespaceprefix_ , eol_))
+        if self.DataQuitacaoGuia is not None:
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sDataQuitacaoGuia>%s</%sDataQuitacaoGuia>%s' % (namespaceprefix_ , self.gds_format_date(self.DataQuitacaoGuia, input_name='DataQuitacaoGuia'), namespaceprefix_ , eol_))
+        if self.ValorServicos is not None:
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sValorServicos>%s</%sValorServicos>%s' % (namespaceprefix_ , self.gds_format_float(self.ValorServicos, input_name='ValorServicos'), namespaceprefix_ , eol_))
+        if self.ValorDeducoes is not None:
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sValorDeducoes>%s</%sValorDeducoes>%s' % (namespaceprefix_ , self.gds_format_float(self.ValorDeducoes, input_name='ValorDeducoes'), namespaceprefix_ , eol_))
+        if self.ValorPIS is not None:
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sValorPIS>%s</%sValorPIS>%s' % (namespaceprefix_ , self.gds_format_float(self.ValorPIS, input_name='ValorPIS'), namespaceprefix_ , eol_))
+        if self.ValorCOFINS is not None:
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sValorCOFINS>%s</%sValorCOFINS>%s' % (namespaceprefix_ , self.gds_format_float(self.ValorCOFINS, input_name='ValorCOFINS'), namespaceprefix_ , eol_))
+        if self.ValorINSS is not None:
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sValorINSS>%s</%sValorINSS>%s' % (namespaceprefix_ , self.gds_format_float(self.ValorINSS, input_name='ValorINSS'), namespaceprefix_ , eol_))
+        if self.ValorIR is not None:
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sValorIR>%s</%sValorIR>%s' % (namespaceprefix_ , self.gds_format_float(self.ValorIR, input_name='ValorIR'), namespaceprefix_ , eol_))
+        if self.ValorCSLL is not None:
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sValorCSLL>%s</%sValorCSLL>%s' % (namespaceprefix_ , self.gds_format_float(self.ValorCSLL, input_name='ValorCSLL'), namespaceprefix_ , eol_))
+        if self.CodigoServico is not None:
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sCodigoServico>%s</%sCodigoServico>%s' % (namespaceprefix_ , self.gds_format_integer(self.CodigoServico, input_name='CodigoServico'), namespaceprefix_ , eol_))
+        if self.AliquotaServicos is not None:
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sAliquotaServicos>%s</%sAliquotaServicos>%s' % (namespaceprefix_ , self.gds_format_float(self.AliquotaServicos, input_name='AliquotaServicos'), namespaceprefix_ , eol_))
+        if self.ValorISS is not None:
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sValorISS>%s</%sValorISS>%s' % (namespaceprefix_ , self.gds_format_float(self.ValorISS, input_name='ValorISS'), namespaceprefix_ , eol_))
+        if self.ValorCredito is not None:
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sValorCredito>%s</%sValorCredito>%s' % (namespaceprefix_ , self.gds_format_float(self.ValorCredito, input_name='ValorCredito'), namespaceprefix_ , eol_))
+        if self.ISSRetido is not None:
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sISSRetido>%s</%sISSRetido>%s' % (namespaceprefix_ , self.gds_format_boolean(self.ISSRetido, input_name='ISSRetido'), namespaceprefix_ , eol_))
+        if self.CPFCNPJTomador is not None:
+            self.CPFCNPJTomador.export(outfile, level, namespaceprefix_, namespacedef_='', name_='CPFCNPJTomador', pretty_print=pretty_print)
+        if self.InscricaoMunicipalTomador is not None:
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sInscricaoMunicipalTomador>%s</%sInscricaoMunicipalTomador>%s' % (namespaceprefix_ , self.gds_format_integer(self.InscricaoMunicipalTomador, input_name='InscricaoMunicipalTomador'), namespaceprefix_ , eol_))
+        if self.InscricaoEstadualTomador is not None:
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sInscricaoEstadualTomador>%s</%sInscricaoEstadualTomador>%s' % (namespaceprefix_ , self.gds_format_integer(self.InscricaoEstadualTomador, input_name='InscricaoEstadualTomador'), namespaceprefix_ , eol_))
+        if self.RazaoSocialTomador is not None:
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sRazaoSocialTomador>%s</%sRazaoSocialTomador>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.RazaoSocialTomador), input_name='RazaoSocialTomador')), namespaceprefix_ , eol_))
+        if self.EnderecoTomador is not None:
+            self.EnderecoTomador.export(outfile, level, namespaceprefix_, namespacedef_='', name_='EnderecoTomador', pretty_print=pretty_print)
+        if self.EmailTomador is not None:
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sEmailTomador>%s</%sEmailTomador>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.EmailTomador), input_name='EmailTomador')), namespaceprefix_ , eol_))
+        if self.CPFCNPJIntermediario is not None:
+            self.CPFCNPJIntermediario.export(outfile, level, namespaceprefix_, namespacedef_='', name_='CPFCNPJIntermediario', pretty_print=pretty_print)
+        if self.InscricaoMunicipalIntermediario is not None:
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sInscricaoMunicipalIntermediario>%s</%sInscricaoMunicipalIntermediario>%s' % (namespaceprefix_ , self.gds_format_integer(self.InscricaoMunicipalIntermediario, input_name='InscricaoMunicipalIntermediario'), namespaceprefix_ , eol_))
+        if self.ISSRetidoIntermediario is not None:
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sISSRetidoIntermediario>%s</%sISSRetidoIntermediario>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ISSRetidoIntermediario), input_name='ISSRetidoIntermediario')), namespaceprefix_ , eol_))
+        if self.EmailIntermediario is not None:
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sEmailIntermediario>%s</%sEmailIntermediario>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.EmailIntermediario), input_name='EmailIntermediario')), namespaceprefix_ , eol_))
+        if self.Discriminacao is not None:
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sDiscriminacao>%s</%sDiscriminacao>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Discriminacao), input_name='Discriminacao')), namespaceprefix_ , eol_))
+        if self.ValorCargaTributaria is not None:
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sValorCargaTributaria>%s</%sValorCargaTributaria>%s' % (namespaceprefix_ , self.gds_format_float(self.ValorCargaTributaria, input_name='ValorCargaTributaria'), namespaceprefix_ , eol_))
+        if self.PercentualCargaTributaria is not None:
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sPercentualCargaTributaria>%s</%sPercentualCargaTributaria>%s' % (namespaceprefix_ , self.gds_format_float(self.PercentualCargaTributaria, input_name='PercentualCargaTributaria'), namespaceprefix_ , eol_))
+        if self.FonteCargaTributaria is not None:
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sFonteCargaTributaria>%s</%sFonteCargaTributaria>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FonteCargaTributaria), input_name='FonteCargaTributaria')), namespaceprefix_ , eol_))
+        if self.CodigoCEI is not None:
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sCodigoCEI>%s</%sCodigoCEI>%s' % (namespaceprefix_ , self.gds_format_integer(self.CodigoCEI, input_name='CodigoCEI'), namespaceprefix_ , eol_))
+        if self.MatriculaObra is not None:
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sMatriculaObra>%s</%sMatriculaObra>%s' % (namespaceprefix_ , self.gds_format_integer(self.MatriculaObra, input_name='MatriculaObra'), namespaceprefix_ , eol_))
+        if self.MunicipioPrestacao is not None:
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sMunicipioPrestacao>%s</%sMunicipioPrestacao>%s' % (namespaceprefix_ , self.gds_format_integer(self.MunicipioPrestacao, input_name='MunicipioPrestacao'), namespaceprefix_ , eol_))
+        if self.NumeroEncapsulamento is not None:
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sNumeroEncapsulamento>%s</%sNumeroEncapsulamento>%s' % (namespaceprefix_ , self.gds_format_integer(self.NumeroEncapsulamento, input_name='NumeroEncapsulamento'), namespaceprefix_ , eol_))
+        if self.ValorTotalRecebido is not None:
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sValorTotalRecebido>%s</%sValorTotalRecebido>%s' % (namespaceprefix_ , self.gds_format_float(self.ValorTotalRecebido, input_name='ValorTotalRecebido'), namespaceprefix_ , eol_))
+    def build(self, node):
+        already_processed = set()
+        self.buildAttributes(node, node.attrib, already_processed)
+        for child in node:
+            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
+            self.buildChildren(child, node, nodeName_)
+        return self
+    def buildAttributes(self, node, attrs, already_processed):
+        pass
+    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
+        if nodeName_ == 'Assinatura':
+            sval_ = child_.text
+            if sval_ is not None:
+                try:
+                    bval_ = base64.b64decode(sval_)
+                except (TypeError, ValueError) as exp:
+                    raise_parse_error(child_, 'requires base64 encoded string: %s' % exp)
+                bval_ = self.gds_validate_base64(bval_, node, 'Assinatura')
+            else:
+                bval_ = None
+            self.Assinatura = bval_
+            # validate type tpAssinatura
+            self.validate_tpAssinatura(self.Assinatura)
+        elif nodeName_ == 'ChaveNFe':
+            obj_ = tpChaveNFe.factory(parent_object_=self)
+            obj_.build(child_)
+            self.ChaveNFe = obj_
+            obj_.original_tagname_ = 'ChaveNFe'
+        elif nodeName_ == 'DataEmissaoNFe':
+            sval_ = child_.text
+            dval_ = self.gds_parse_datetime(sval_)
+            self.DataEmissaoNFe = dval_
+        elif nodeName_ == 'NumeroLote' and child_.text:
+            sval_ = child_.text
+            try:
+                ival_ = int(sval_)
+            except (TypeError, ValueError)
as exp: + raise_parse_error(child_, 'requires integer: %s' % exp) + ival_ = self.gds_validate_integer(ival_, node, 'NumeroLote') + self.NumeroLote = ival_ + # validate type tpNumero + self.validate_tpNumero(self.NumeroLote) + elif nodeName_ == 'ChaveRPS': + obj_ = tpChaveRPS.factory(parent_object_=self) + obj_.build(child_) + self.ChaveRPS = obj_ + obj_.original_tagname_ = 'ChaveRPS' + elif nodeName_ == 'TipoRPS': + TipoRPS_ = child_.text + TipoRPS_ = self.gds_validate_string(TipoRPS_, node, 'TipoRPS') + self.TipoRPS = TipoRPS_ + # validate type tpTipoRPS + self.validate_tpTipoRPS(self.TipoRPS) + elif nodeName_ == 'DataEmissaoRPS': + sval_ = child_.text + dval_ = self.gds_parse_date(sval_) + self.DataEmissaoRPS = dval_ + elif nodeName_ == 'CPFCNPJPrestador': + obj_ = tpCPFCNPJ.factory(parent_object_=self) + obj_.build(child_) + self.CPFCNPJPrestador = obj_ + obj_.original_tagname_ = 'CPFCNPJPrestador' + elif nodeName_ == 'RazaoSocialPrestador': + RazaoSocialPrestador_ = child_.text + RazaoSocialPrestador_ = self.gds_validate_string(RazaoSocialPrestador_, node, 'RazaoSocialPrestador') + self.RazaoSocialPrestador = RazaoSocialPrestador_ + # validate type tpRazaoSocial + self.validate_tpRazaoSocial(self.RazaoSocialPrestador) + elif nodeName_ == 'EnderecoPrestador': + obj_ = tpEndereco.factory(parent_object_=self) + obj_.build(child_) + self.EnderecoPrestador = obj_ + obj_.original_tagname_ = 'EnderecoPrestador' + elif nodeName_ == 'EmailPrestador': + EmailPrestador_ = child_.text + EmailPrestador_ = self.gds_validate_string(EmailPrestador_, node, 'EmailPrestador') + self.EmailPrestador = EmailPrestador_ + # validate type tpEmail + self.validate_tpEmail(self.EmailPrestador) + elif nodeName_ == 'StatusNFe': + StatusNFe_ = child_.text + StatusNFe_ = self.gds_validate_string(StatusNFe_, node, 'StatusNFe') + self.StatusNFe = StatusNFe_ + # validate type tpStatusNFe + self.validate_tpStatusNFe(self.StatusNFe) + elif nodeName_ == 'DataCancelamento': + sval_ = child_.text + dval_ = self.gds_parse_datetime(sval_) + self.DataCancelamento = dval_ + elif nodeName_ == 'TributacaoNFe': + TributacaoNFe_ = child_.text + TributacaoNFe_ = self.gds_validate_string(TributacaoNFe_, node, 'TributacaoNFe') + self.TributacaoNFe = TributacaoNFe_ + # validate type tpTributacaoNFe + self.validate_tpTributacaoNFe(self.TributacaoNFe) + elif nodeName_ == 'OpcaoSimples': + OpcaoSimples_ = child_.text + OpcaoSimples_ = self.gds_validate_string(OpcaoSimples_, node, 'OpcaoSimples') + self.OpcaoSimples = OpcaoSimples_ + # validate type tpOpcaoSimples + self.validate_tpOpcaoSimples(self.OpcaoSimples) + elif nodeName_ == 'NumeroGuia' and child_.text: + sval_ = child_.text + try: + ival_ = int(sval_) + except (TypeError, ValueError) as exp: + raise_parse_error(child_, 'requires integer: %s' % exp) + ival_ = self.gds_validate_integer(ival_, node, 'NumeroGuia') + self.NumeroGuia = ival_ + # validate type tpNumero + self.validate_tpNumero(self.NumeroGuia) + elif nodeName_ == 'DataQuitacaoGuia': + sval_ = child_.text + dval_ = self.gds_parse_date(sval_) + self.DataQuitacaoGuia = dval_ + elif nodeName_ == 'ValorServicos' and child_.text: + sval_ = child_.text + try: + fval_ = float(sval_) + except (TypeError, ValueError) as exp: + raise_parse_error(child_, 'requires float or double: %s' % exp) + fval_ = self.gds_validate_float(fval_, node, 'ValorServicos') + self.ValorServicos = fval_ + # validate type tpValor + self.validate_tpValor(self.ValorServicos) + elif nodeName_ == 'ValorDeducoes' and child_.text: + sval_ = child_.text + try: 
+ fval_ = float(sval_) + except (TypeError, ValueError) as exp: + raise_parse_error(child_, 'requires float or double: %s' % exp) + fval_ = self.gds_validate_float(fval_, node, 'ValorDeducoes') + self.ValorDeducoes = fval_ + # validate type tpValor + self.validate_tpValor(self.ValorDeducoes) + elif nodeName_ == 'ValorPIS' and child_.text: + sval_ = child_.text + try: + fval_ = float(sval_) + except (TypeError, ValueError) as exp: + raise_parse_error(child_, 'requires float or double: %s' % exp) + fval_ = self.gds_validate_float(fval_, node, 'ValorPIS') + self.ValorPIS = fval_ + # validate type tpValor + self.validate_tpValor(self.ValorPIS) + elif nodeName_ == 'ValorCOFINS' and child_.text: + sval_ = child_.text + try: + fval_ = float(sval_) + except (TypeError, ValueError) as exp: + raise_parse_error(child_, 'requires float or double: %s' % exp) + fval_ = self.gds_validate_float(fval_, node, 'ValorCOFINS') + self.ValorCOFINS = fval_ + # validate type tpValor + self.validate_tpValor(self.ValorCOFINS) + elif nodeName_ == 'ValorINSS' and child_.text: + sval_ = child_.text + try: + fval_ = float(sval_) + except (TypeError, ValueError) as exp: + raise_parse_error(child_, 'requires float or double: %s' % exp) + fval_ = self.gds_validate_float(fval_, node, 'ValorINSS') + self.ValorINSS = fval_ + # validate type tpValor + self.validate_tpValor(self.ValorINSS) + elif nodeName_ == 'ValorIR' and child_.text: + sval_ = child_.text + try: + fval_ = float(sval_) + except (TypeError, ValueError) as exp: + raise_parse_error(child_, 'requires float or double: %s' % exp) + fval_ = self.gds_validate_float(fval_, node, 'ValorIR') + self.ValorIR = fval_ + # validate type tpValor + self.validate_tpValor(self.ValorIR) + elif nodeName_ == 'ValorCSLL' and child_.text: + sval_ = child_.text + try: + fval_ = float(sval_) + except (TypeError, ValueError) as exp: + raise_parse_error(child_, 'requires float or double: %s' % exp) + fval_ = self.gds_validate_float(fval_, node, 'ValorCSLL') + self.ValorCSLL = fval_ + # validate type tpValor + self.validate_tpValor(self.ValorCSLL) + elif nodeName_ == 'CodigoServico' and child_.text: + sval_ = child_.text + try: + ival_ = int(sval_) + except (TypeError, ValueError) as exp: + raise_parse_error(child_, 'requires integer: %s' % exp) + ival_ = self.gds_validate_integer(ival_, node, 'CodigoServico') + self.CodigoServico = ival_ + # validate type tpCodigoServico + self.validate_tpCodigoServico(self.CodigoServico) + elif nodeName_ == 'AliquotaServicos' and child_.text: + sval_ = child_.text + try: + fval_ = float(sval_) + except (TypeError, ValueError) as exp: + raise_parse_error(child_, 'requires float or double: %s' % exp) + fval_ = self.gds_validate_float(fval_, node, 'AliquotaServicos') + self.AliquotaServicos = fval_ + # validate type tpAliquota + self.validate_tpAliquota(self.AliquotaServicos) + elif nodeName_ == 'ValorISS' and child_.text: + sval_ = child_.text + try: + fval_ = float(sval_) + except (TypeError, ValueError) as exp: + raise_parse_error(child_, 'requires float or double: %s' % exp) + fval_ = self.gds_validate_float(fval_, node, 'ValorISS') + self.ValorISS = fval_ + # validate type tpValor + self.validate_tpValor(self.ValorISS) + elif nodeName_ == 'ValorCredito' and child_.text: + sval_ = child_.text + try: + fval_ = float(sval_) + except (TypeError, ValueError) as exp: + raise_parse_error(child_, 'requires float or double: %s' % exp) + fval_ = self.gds_validate_float(fval_, node, 'ValorCredito') + self.ValorCredito = fval_ + # validate type tpValor + 
self.validate_tpValor(self.ValorCredito) + elif nodeName_ == 'ISSRetido': + sval_ = child_.text + if sval_ in ('true', '1'): + ival_ = True + elif sval_ in ('false', '0'): + ival_ = False + else: + raise_parse_error(child_, 'requires boolean') + ival_ = self.gds_validate_boolean(ival_, node, 'ISSRetido') + self.ISSRetido = ival_ + elif nodeName_ == 'CPFCNPJTomador': + obj_ = tpCPFCNPJ.factory(parent_object_=self) + obj_.build(child_) + self.CPFCNPJTomador = obj_ + obj_.original_tagname_ = 'CPFCNPJTomador' + elif nodeName_ == 'InscricaoMunicipalTomador' and child_.text: + sval_ = child_.text + try: + ival_ = int(sval_) + except (TypeError, ValueError) as exp: + raise_parse_error(child_, 'requires integer: %s' % exp) + ival_ = self.gds_validate_integer(ival_, node, 'InscricaoMunicipalTomador') + self.InscricaoMunicipalTomador = ival_ + # validate type tpInscricaoMunicipal + self.validate_tpInscricaoMunicipal(self.InscricaoMunicipalTomador) + elif nodeName_ == 'InscricaoEstadualTomador' and child_.text: + sval_ = child_.text + try: + ival_ = int(sval_) + except (TypeError, ValueError) as exp: + raise_parse_error(child_, 'requires integer: %s' % exp) + ival_ = self.gds_validate_integer(ival_, node, 'InscricaoEstadualTomador') + self.InscricaoEstadualTomador = ival_ + # validate type tpInscricaoEstadual + self.validate_tpInscricaoEstadual(self.InscricaoEstadualTomador) + elif nodeName_ == 'RazaoSocialTomador': + RazaoSocialTomador_ = child_.text + RazaoSocialTomador_ = self.gds_validate_string(RazaoSocialTomador_, node, 'RazaoSocialTomador') + self.RazaoSocialTomador = RazaoSocialTomador_ + # validate type tpRazaoSocial + self.validate_tpRazaoSocial(self.RazaoSocialTomador) + elif nodeName_ == 'EnderecoTomador': + obj_ = tpEndereco.factory(parent_object_=self) + obj_.build(child_) + self.EnderecoTomador = obj_ + obj_.original_tagname_ = 'EnderecoTomador' + elif nodeName_ == 'EmailTomador': + EmailTomador_ = child_.text + EmailTomador_ = self.gds_validate_string(EmailTomador_, node, 'EmailTomador') + self.EmailTomador = EmailTomador_ + # validate type tpEmail + self.validate_tpEmail(self.EmailTomador) + elif nodeName_ == 'CPFCNPJIntermediario': + obj_ = tpCPFCNPJ.factory(parent_object_=self) + obj_.build(child_) + self.CPFCNPJIntermediario = obj_ + obj_.original_tagname_ = 'CPFCNPJIntermediario' + elif nodeName_ == 'InscricaoMunicipalIntermediario' and child_.text: + sval_ = child_.text + try: + ival_ = int(sval_) + except (TypeError, ValueError) as exp: + raise_parse_error(child_, 'requires integer: %s' % exp) + ival_ = self.gds_validate_integer(ival_, node, 'InscricaoMunicipalIntermediario') + self.InscricaoMunicipalIntermediario = ival_ + # validate type tpInscricaoMunicipal + self.validate_tpInscricaoMunicipal(self.InscricaoMunicipalIntermediario) + elif nodeName_ == 'ISSRetidoIntermediario': + ISSRetidoIntermediario_ = child_.text + ISSRetidoIntermediario_ = self.gds_validate_string(ISSRetidoIntermediario_, node, 'ISSRetidoIntermediario') + self.ISSRetidoIntermediario = ISSRetidoIntermediario_ + elif nodeName_ == 'EmailIntermediario': + EmailIntermediario_ = child_.text + EmailIntermediario_ = self.gds_validate_string(EmailIntermediario_, node, 'EmailIntermediario') + self.EmailIntermediario = EmailIntermediario_ + # validate type tpEmail + self.validate_tpEmail(self.EmailIntermediario) + elif nodeName_ == 'Discriminacao': + Discriminacao_ = child_.text + Discriminacao_ = self.gds_validate_string(Discriminacao_, node, 'Discriminacao') + self.Discriminacao = Discriminacao_ + # validate type 
tpDiscriminacao + self.validate_tpDiscriminacao(self.Discriminacao) + elif nodeName_ == 'ValorCargaTributaria' and child_.text: + sval_ = child_.text + try: + fval_ = float(sval_) + except (TypeError, ValueError) as exp: + raise_parse_error(child_, 'requires float or double: %s' % exp) + fval_ = self.gds_validate_float(fval_, node, 'ValorCargaTributaria') + self.ValorCargaTributaria = fval_ + # validate type tpValor + self.validate_tpValor(self.ValorCargaTributaria) + elif nodeName_ == 'PercentualCargaTributaria' and child_.text: + sval_ = child_.text + try: + fval_ = float(sval_) + except (TypeError, ValueError) as exp: + raise_parse_error(child_, 'requires float or double: %s' % exp) + fval_ = self.gds_validate_float(fval_, node, 'PercentualCargaTributaria') + self.PercentualCargaTributaria = fval_ + # validate type tpPercentualCargaTributaria + self.validate_tpPercentualCargaTributaria(self.PercentualCargaTributaria) + elif nodeName_ == 'FonteCargaTributaria': + FonteCargaTributaria_ = child_.text + FonteCargaTributaria_ = self.gds_validate_string(FonteCargaTributaria_, node, 'FonteCargaTributaria') + self.FonteCargaTributaria = FonteCargaTributaria_ + # validate type tpFonteCargaTributaria + self.validate_tpFonteCargaTributaria(self.FonteCargaTributaria) + elif nodeName_ == 'CodigoCEI' and child_.text: + sval_ = child_.text + try: + ival_ = int(sval_) + except (TypeError, ValueError) as exp: + raise_parse_error(child_, 'requires integer: %s' % exp) + ival_ = self.gds_validate_integer(ival_, node, 'CodigoCEI') + self.CodigoCEI = ival_ + # validate type tpNumero + self.validate_tpNumero(self.CodigoCEI) + elif nodeName_ == 'MatriculaObra' and child_.text: + sval_ = child_.text + try: + ival_ = int(sval_) + except (TypeError, ValueError) as exp: + raise_parse_error(child_, 'requires integer: %s' % exp) + ival_ = self.gds_validate_integer(ival_, node, 'MatriculaObra') + self.MatriculaObra = ival_ + # validate type tpNumero + self.validate_tpNumero(self.MatriculaObra) + elif nodeName_ == 'MunicipioPrestacao' and child_.text: + sval_ = child_.text + try: + ival_ = int(sval_) + except (TypeError, ValueError) as exp: + raise_parse_error(child_, 'requires integer: %s' % exp) + ival_ = self.gds_validate_integer(ival_, node, 'MunicipioPrestacao') + self.MunicipioPrestacao = ival_ + # validate type tpCidade + self.validate_tpCidade(self.MunicipioPrestacao) + elif nodeName_ == 'NumeroEncapsulamento' and child_.text: + sval_ = child_.text + try: + ival_ = int(sval_) + except (TypeError, ValueError) as exp: + raise_parse_error(child_, 'requires integer: %s' % exp) + ival_ = self.gds_validate_integer(ival_, node, 'NumeroEncapsulamento') + self.NumeroEncapsulamento = ival_ + # validate type tpNumero + self.validate_tpNumero(self.NumeroEncapsulamento) + elif nodeName_ == 'ValorTotalRecebido' and child_.text: + sval_ = child_.text + try: + fval_ = float(sval_) + except (TypeError, ValueError) as exp: + raise_parse_error(child_, 'requires float or double: %s' % exp) + fval_ = self.gds_validate_float(fval_, node, 'ValorTotalRecebido') + self.ValorTotalRecebido = fval_ + # validate type tpValor + self.validate_tpValor(self.ValorTotalRecebido) +# end class tpNFe + + +class tpRPS(GeneratedsSuper): + """Tipo que representa um RPS.""" + subclass = None + superclass = None + def __init__(self, Assinatura=None, ChaveRPS=None, TipoRPS=None, DataEmissao=None, StatusRPS=None, TributacaoRPS=None, ValorServicos=None, ValorDeducoes=None, ValorPIS=None, ValorCOFINS=None, ValorINSS=None, ValorIR=None, ValorCSLL=None, 
CodigoServico=None, AliquotaServicos=None, ISSRetido=None, CPFCNPJTomador=None, InscricaoMunicipalTomador=None, InscricaoEstadualTomador=None, RazaoSocialTomador=None, EnderecoTomador=None, EmailTomador=None, CPFCNPJIntermediario=None, InscricaoMunicipalIntermediario=None, ISSRetidoIntermediario=None, EmailIntermediario=None, Discriminacao=None, ValorCargaTributaria=None, PercentualCargaTributaria=None, FonteCargaTributaria=None, CodigoCEI=None, MatriculaObra=None, MunicipioPrestacao=None, NumeroEncapsulamento=None, ValorTotalRecebido=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.Assinatura = Assinatura + self.validate_tpAssinatura(self.Assinatura) + self.ChaveRPS = ChaveRPS + self.TipoRPS = TipoRPS + self.validate_tpTipoRPS(self.TipoRPS) + if isinstance(DataEmissao, BaseStrType_): + initvalue_ = datetime_.datetime.strptime(DataEmissao, '%Y-%m-%d').date() + else: + initvalue_ = DataEmissao + self.DataEmissao = initvalue_ + self.StatusRPS = StatusRPS + self.validate_tpStatusNFe(self.StatusRPS) + self.TributacaoRPS = TributacaoRPS + self.validate_tpTributacaoNFe(self.TributacaoRPS) + self.ValorServicos = ValorServicos + self.validate_tpValor(self.ValorServicos) + self.ValorDeducoes = ValorDeducoes + self.validate_tpValor(self.ValorDeducoes) + self.ValorPIS = ValorPIS + self.validate_tpValor(self.ValorPIS) + self.ValorCOFINS = ValorCOFINS + self.validate_tpValor(self.ValorCOFINS) + self.ValorINSS = ValorINSS + self.validate_tpValor(self.ValorINSS) + self.ValorIR = ValorIR + self.validate_tpValor(self.ValorIR) + self.ValorCSLL = ValorCSLL + self.validate_tpValor(self.ValorCSLL) + self.CodigoServico = CodigoServico + self.validate_tpCodigoServico(self.CodigoServico) + self.AliquotaServicos = AliquotaServicos + self.validate_tpAliquota(self.AliquotaServicos) + self.ISSRetido = ISSRetido + self.CPFCNPJTomador = CPFCNPJTomador + self.InscricaoMunicipalTomador = InscricaoMunicipalTomador + self.validate_tpInscricaoMunicipal(self.InscricaoMunicipalTomador) + self.InscricaoEstadualTomador = InscricaoEstadualTomador + self.validate_tpInscricaoEstadual(self.InscricaoEstadualTomador) + self.RazaoSocialTomador = RazaoSocialTomador + self.validate_tpRazaoSocial(self.RazaoSocialTomador) + self.EnderecoTomador = EnderecoTomador + self.EmailTomador = EmailTomador + self.validate_tpEmail(self.EmailTomador) + self.CPFCNPJIntermediario = CPFCNPJIntermediario + self.InscricaoMunicipalIntermediario = InscricaoMunicipalIntermediario + self.validate_tpInscricaoMunicipal(self.InscricaoMunicipalIntermediario) + self.ISSRetidoIntermediario = ISSRetidoIntermediario + self.EmailIntermediario = EmailIntermediario + self.validate_tpEmail(self.EmailIntermediario) + self.Discriminacao = Discriminacao + self.validate_tpDiscriminacao(self.Discriminacao) + self.ValorCargaTributaria = ValorCargaTributaria + self.validate_tpValor(self.ValorCargaTributaria) + self.PercentualCargaTributaria = PercentualCargaTributaria + self.validate_tpPercentualCargaTributaria(self.PercentualCargaTributaria) + self.FonteCargaTributaria = FonteCargaTributaria + self.validate_tpFonteCargaTributaria(self.FonteCargaTributaria) + self.CodigoCEI = CodigoCEI + self.validate_tpNumero(self.CodigoCEI) + self.MatriculaObra = MatriculaObra + self.validate_tpNumero(self.MatriculaObra) + self.MunicipioPrestacao = MunicipioPrestacao + self.validate_tpCidade(self.MunicipioPrestacao) + self.NumeroEncapsulamento = NumeroEncapsulamento + self.validate_tpNumero(self.NumeroEncapsulamento) + 
self.ValorTotalRecebido = ValorTotalRecebido + self.validate_tpValor(self.ValorTotalRecebido) + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, tpRPS) + if subclass is not None: + return subclass(*args_, **kwargs_) + if tpRPS.subclass: + return tpRPS.subclass(*args_, **kwargs_) + else: + return tpRPS(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_tpAssinatura(self, value): + # Validate type tpAssinatura, a restriction on xs:base64Binary. + if value is not None and Validate_simpletypes_: + pass + def validate_tpTipoRPS(self, value): + # Validate type tpTipoRPS, a restriction on xs:string. + if value is not None and Validate_simpletypes_: + value = str(value) + enumerations = ['RPS', 'RPS-M', 'RPS-C'] + enumeration_respectee = False + for enum in enumerations: + if value == enum: + enumeration_respectee = True + break + if not enumeration_respectee: + warnings_.warn('Value "%(value)s" does not match xsd enumeration restriction on tpTipoRPS' % {"value" : value.encode("utf-8")} ) + def validate_tpStatusNFe(self, value): + # Validate type tpStatusNFe, a restriction on xs:string. + if value is not None and Validate_simpletypes_: + value = str(value) + enumerations = ['N', 'C', 'E'] + enumeration_respectee = False + for enum in enumerations: + if value == enum: + enumeration_respectee = True + break + if not enumeration_respectee: + warnings_.warn('Value "%(value)s" does not match xsd enumeration restriction on tpStatusNFe' % {"value" : value.encode("utf-8")} ) + def validate_tpTributacaoNFe(self, value): + # Validate type tpTributacaoNFe, a restriction on xs:string. + if value is not None and Validate_simpletypes_: + if len(value) > 1: + warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpTributacaoNFe' % {"value" : value.encode("utf-8")} ) + if len(value) < 1: + warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tpTributacaoNFe' % {"value" : value.encode("utf-8")} ) + def validate_tpValor(self, value): + # Validate type tpValor, a restriction on xs:decimal. + if value is not None and Validate_simpletypes_: + if value < 0: + warnings_.warn('Value "%(value)s" does not match xsd minInclusive restriction on tpValor' % {"value" : value} ) + if len(str(value)) >= 15: + warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on tpValor' % {"value" : value} ) + if not self.gds_validate_simple_patterns( + self.validate_tpValor_patterns_, value): + warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tpValor_patterns_, )) + validate_tpValor_patterns_ = [['^0|0\\.[0-9]{2}|[1-9]{1}[0-9]{0,12}(\\.[0-9]{0,2})?$']] + def validate_tpCodigoServico(self, value): + # Validate type tpCodigoServico, a restriction on xs:int. + if value is not None and Validate_simpletypes_: + if not self.gds_validate_simple_patterns( + self.validate_tpCodigoServico_patterns_, value): + warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tpCodigoServico_patterns_, )) + validate_tpCodigoServico_patterns_ = [['^[0-9]{4,5}$']] + def validate_tpAliquota(self, value): + # Validate type tpAliquota, a restriction on xs:decimal. 
+ if value is not None and Validate_simpletypes_: + if value < 0: + warnings_.warn('Value "%(value)s" does not match xsd minInclusive restriction on tpAliquota' % {"value" : value} ) + if len(str(value)) >= 5: + warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on tpAliquota' % {"value" : value} ) + def validate_tpInscricaoMunicipal(self, value): + # Validate type tpInscricaoMunicipal, a restriction on xs:long. + if value is not None and Validate_simpletypes_: + if not self.gds_validate_simple_patterns( + self.validate_tpInscricaoMunicipal_patterns_, value): + warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tpInscricaoMunicipal_patterns_, )) + validate_tpInscricaoMunicipal_patterns_ = [['^[0-9]{8,8}$']] + def validate_tpInscricaoEstadual(self, value): + # Validate type tpInscricaoEstadual, a restriction on xs:long. + if value is not None and Validate_simpletypes_: + if not self.gds_validate_simple_patterns( + self.validate_tpInscricaoEstadual_patterns_, value): + warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tpInscricaoEstadual_patterns_, )) + validate_tpInscricaoEstadual_patterns_ = [['^[0-9]{1,19}$']] + def validate_tpRazaoSocial(self, value): + # Validate type tpRazaoSocial, a restriction on xs:string. + if value is not None and Validate_simpletypes_: + if len(value) > 75: + warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpRazaoSocial' % {"value" : value.encode("utf-8")} ) + if len(value) < 0: + warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tpRazaoSocial' % {"value" : value.encode("utf-8")} ) + def validate_tpEmail(self, value): + # Validate type tpEmail, a restriction on xs:string. + if value is not None and Validate_simpletypes_: + if len(value) > 75: + warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpEmail' % {"value" : value.encode("utf-8")} ) + if len(value) < 0: + warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tpEmail' % {"value" : value.encode("utf-8")} ) + def validate_tpDiscriminacao(self, value): + # Validate type tpDiscriminacao, a restriction on xs:string. + if value is not None and Validate_simpletypes_: + if len(value) > 2000: + warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpDiscriminacao' % {"value" : value.encode("utf-8")} ) + if len(value) < 1: + warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tpDiscriminacao' % {"value" : value.encode("utf-8")} ) + def validate_tpPercentualCargaTributaria(self, value): + # Validate type tpPercentualCargaTributaria, a restriction on xs:decimal. + if value is not None and Validate_simpletypes_: + if value < 0: + warnings_.warn('Value "%(value)s" does not match xsd minInclusive restriction on tpPercentualCargaTributaria' % {"value" : value} ) + if len(str(value)) >= 7: + warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on tpPercentualCargaTributaria' % {"value" : value} ) + def validate_tpFonteCargaTributaria(self, value): + # Validate type tpFonteCargaTributaria, a restriction on xs:string. 
+        if value is not None and Validate_simpletypes_:
+            if len(value) > 10:
+                warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpFonteCargaTributaria' % {"value" : value.encode("utf-8")} )
+            if len(value) < 0:
+                warnings_.warn('Value "%(value)s" does not match xsd minLength restriction on tpFonteCargaTributaria' % {"value" : value.encode("utf-8")} )
+    def validate_tpNumero(self, value):
+        # Validate type tpNumero, a restriction on xs:long.
+        if value is not None and Validate_simpletypes_:
+            if not self.gds_validate_simple_patterns(
+                    self.validate_tpNumero_patterns_, value):
+                warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tpNumero_patterns_, ))
+    validate_tpNumero_patterns_ = [['^[0-9]{1,12}$']]
+    def validate_tpCidade(self, value):
+        # Validate type tpCidade, a restriction on xs:int.
+        if value is not None and Validate_simpletypes_:
+            if not self.gds_validate_simple_patterns(
+                    self.validate_tpCidade_patterns_, value):
+                warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tpCidade_patterns_, ))
+    validate_tpCidade_patterns_ = [['^[0-9]{7}$']]
+    def hasContent_(self):
+        if (
+            self.Assinatura is not None or
+            self.ChaveRPS is not None or
+            self.TipoRPS is not None or
+            self.DataEmissao is not None or
+            self.StatusRPS is not None or
+            self.TributacaoRPS is not None or
+            self.ValorServicos is not None or
+            self.ValorDeducoes is not None or
+            self.ValorPIS is not None or
+            self.ValorCOFINS is not None or
+            self.ValorINSS is not None or
+            self.ValorIR is not None or
+            self.ValorCSLL is not None or
+            self.CodigoServico is not None or
+            self.AliquotaServicos is not None or
+            self.ISSRetido is not None or
+            self.CPFCNPJTomador is not None or
+            self.InscricaoMunicipalTomador is not None or
+            self.InscricaoEstadualTomador is not None or
+            self.RazaoSocialTomador is not None or
+            self.EnderecoTomador is not None or
+            self.EmailTomador is not None or
+            self.CPFCNPJIntermediario is not None or
+            self.InscricaoMunicipalIntermediario is not None or
+            self.ISSRetidoIntermediario is not None or
+            self.EmailIntermediario is not None or
+            self.Discriminacao is not None or
+            self.ValorCargaTributaria is not None or
+            self.PercentualCargaTributaria is not None or
+            self.FonteCargaTributaria is not None or
+            self.CodigoCEI is not None or
+            self.MatriculaObra is not None or
+            self.MunicipioPrestacao is not None or
+            self.NumeroEncapsulamento is not None or
+            self.ValorTotalRecebido is not None
+        ):
+            return True
+        else:
+            return False
+    def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='tpRPS', pretty_print=True):
+        imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpRPS')
+        if imported_ns_def_ is not None:
+            namespacedef_ = imported_ns_def_
+        if pretty_print:
+            eol_ = '\n'
+        else:
+            eol_ = ''
+        if self.original_tagname_ is not None:
+            name_ = self.original_tagname_
+        showIndent(outfile, level, pretty_print)
+        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
+        already_processed = set()
+        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='tpRPS')
+        if self.hasContent_():
+            outfile.write('>%s' % (eol_, ))
+            self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='tpRPS', pretty_print=pretty_print)
+            showIndent(outfile, level, pretty_print)
+            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
+        else:
+            outfile.write('/>%s' % (eol_, ))
+    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='tpRPS'):
+        pass
+    def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='tpRPS', fromsubclass_=False, pretty_print=True):
+        if pretty_print:
+            eol_ = '\n'
+        else:
+            eol_ = ''
+        if self.Assinatura is not None:
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sAssinatura>%s</%sAssinatura>%s' % (namespaceprefix_ , self.gds_format_base64(self.Assinatura, input_name='Assinatura'), namespaceprefix_ , eol_))
+        if self.ChaveRPS is not None:
+            self.ChaveRPS.export(outfile, level, namespaceprefix_, namespacedef_='', name_='ChaveRPS', pretty_print=pretty_print)
+        if self.TipoRPS is not None:
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sTipoRPS>%s</%sTipoRPS>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.TipoRPS), input_name='TipoRPS')), namespaceprefix_ , eol_))
+        if self.DataEmissao is not None:
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sDataEmissao>%s</%sDataEmissao>%s' % (namespaceprefix_ , self.gds_format_date(self.DataEmissao, input_name='DataEmissao'), namespaceprefix_ , eol_))
+        if self.StatusRPS is not None:
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sStatusRPS>%s</%sStatusRPS>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.StatusRPS), input_name='StatusRPS')), namespaceprefix_ , eol_))
+        if self.TributacaoRPS is not None:
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sTributacaoRPS>%s</%sTributacaoRPS>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.TributacaoRPS), input_name='TributacaoRPS')), namespaceprefix_ , eol_))
+        if self.ValorServicos is not None:
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sValorServicos>%s</%sValorServicos>%s' % (namespaceprefix_ , self.gds_format_float(self.ValorServicos, input_name='ValorServicos'), namespaceprefix_ , eol_))
+        if self.ValorDeducoes is not None:
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sValorDeducoes>%s</%sValorDeducoes>%s' % (namespaceprefix_ , self.gds_format_float(self.ValorDeducoes, input_name='ValorDeducoes'), namespaceprefix_ , eol_))
+        if self.ValorPIS is not None:
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sValorPIS>%s</%sValorPIS>%s' % (namespaceprefix_ , self.gds_format_float(self.ValorPIS, input_name='ValorPIS'), namespaceprefix_ , eol_))
+        if self.ValorCOFINS is not None:
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sValorCOFINS>%s</%sValorCOFINS>%s' % (namespaceprefix_ , self.gds_format_float(self.ValorCOFINS, input_name='ValorCOFINS'), namespaceprefix_ , eol_))
+        if self.ValorINSS is not None:
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sValorINSS>%s</%sValorINSS>%s' % (namespaceprefix_ , self.gds_format_float(self.ValorINSS, input_name='ValorINSS'), namespaceprefix_ , eol_))
+        if self.ValorIR is not None:
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sValorIR>%s</%sValorIR>%s' % (namespaceprefix_ , self.gds_format_float(self.ValorIR, input_name='ValorIR'), namespaceprefix_ , eol_))
+        if self.ValorCSLL is not None:
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sValorCSLL>%s</%sValorCSLL>%s' % (namespaceprefix_ , self.gds_format_float(self.ValorCSLL, input_name='ValorCSLL'), namespaceprefix_ , eol_))
+        if self.CodigoServico is not None:
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sCodigoServico>%s</%sCodigoServico>%s' % (namespaceprefix_ , self.gds_format_integer(self.CodigoServico, input_name='CodigoServico'), namespaceprefix_ , eol_))
+        if self.AliquotaServicos is not None:
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sAliquotaServicos>%s</%sAliquotaServicos>%s' % (namespaceprefix_ , self.gds_format_float(self.AliquotaServicos, input_name='AliquotaServicos'), namespaceprefix_ , eol_))
+        if self.ISSRetido is not None:
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sISSRetido>%s</%sISSRetido>%s' % (namespaceprefix_ , self.gds_format_boolean(self.ISSRetido, input_name='ISSRetido'), namespaceprefix_ , eol_))
+        if self.CPFCNPJTomador is not None:
+            self.CPFCNPJTomador.export(outfile, level, namespaceprefix_, namespacedef_='', name_='CPFCNPJTomador', pretty_print=pretty_print)
+        if self.InscricaoMunicipalTomador is not None:
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sInscricaoMunicipalTomador>%s</%sInscricaoMunicipalTomador>%s' % (namespaceprefix_ , self.gds_format_integer(self.InscricaoMunicipalTomador, input_name='InscricaoMunicipalTomador'), namespaceprefix_ , eol_))
+        if self.InscricaoEstadualTomador is not None:
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sInscricaoEstadualTomador>%s</%sInscricaoEstadualTomador>%s' % (namespaceprefix_ , self.gds_format_integer(self.InscricaoEstadualTomador, input_name='InscricaoEstadualTomador'), namespaceprefix_ , eol_))
+        if self.RazaoSocialTomador is not None:
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sRazaoSocialTomador>%s</%sRazaoSocialTomador>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.RazaoSocialTomador), input_name='RazaoSocialTomador')), namespaceprefix_ , eol_))
+        if self.EnderecoTomador is not None:
+            self.EnderecoTomador.export(outfile, level, namespaceprefix_, namespacedef_='', name_='EnderecoTomador', pretty_print=pretty_print)
+        if self.EmailTomador is not None:
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sEmailTomador>%s</%sEmailTomador>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.EmailTomador), input_name='EmailTomador')), namespaceprefix_ , eol_))
+        if self.CPFCNPJIntermediario is not None:
+            self.CPFCNPJIntermediario.export(outfile, level, namespaceprefix_, namespacedef_='', name_='CPFCNPJIntermediario', pretty_print=pretty_print)
+        if self.InscricaoMunicipalIntermediario is not None:
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sInscricaoMunicipalIntermediario>%s</%sInscricaoMunicipalIntermediario>%s' % (namespaceprefix_ , self.gds_format_integer(self.InscricaoMunicipalIntermediario, input_name='InscricaoMunicipalIntermediario'), namespaceprefix_ , eol_))
+        if self.ISSRetidoIntermediario is not None:
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sISSRetidoIntermediario>%s</%sISSRetidoIntermediario>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ISSRetidoIntermediario), input_name='ISSRetidoIntermediario')), namespaceprefix_ , eol_))
+        if self.EmailIntermediario is not None:
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sEmailIntermediario>%s</%sEmailIntermediario>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.EmailIntermediario), input_name='EmailIntermediario')), namespaceprefix_ , eol_))
+        if self.Discriminacao is not None:
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sDiscriminacao>%s</%sDiscriminacao>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Discriminacao), input_name='Discriminacao')), namespaceprefix_ , eol_))
+        if self.ValorCargaTributaria is not None:
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sValorCargaTributaria>%s</%sValorCargaTributaria>%s' % (namespaceprefix_ , self.gds_format_float(self.ValorCargaTributaria, input_name='ValorCargaTributaria'), namespaceprefix_ , eol_))
+        if self.PercentualCargaTributaria is not None:
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sPercentualCargaTributaria>%s</%sPercentualCargaTributaria>%s' % (namespaceprefix_ , self.gds_format_float(self.PercentualCargaTributaria, input_name='PercentualCargaTributaria'), namespaceprefix_ , eol_))
+        if self.FonteCargaTributaria is not None:
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sFonteCargaTributaria>%s</%sFonteCargaTributaria>%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.FonteCargaTributaria), input_name='FonteCargaTributaria')), namespaceprefix_ , eol_))
+        if self.CodigoCEI is not None:
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sCodigoCEI>%s</%sCodigoCEI>%s' % (namespaceprefix_ , self.gds_format_integer(self.CodigoCEI, input_name='CodigoCEI'), namespaceprefix_ , eol_))
+        if self.MatriculaObra is not None:
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sMatriculaObra>%s</%sMatriculaObra>%s' % (namespaceprefix_ , self.gds_format_integer(self.MatriculaObra, input_name='MatriculaObra'), namespaceprefix_ , eol_))
+        if self.MunicipioPrestacao is not None:
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sMunicipioPrestacao>%s</%sMunicipioPrestacao>%s' % (namespaceprefix_ , self.gds_format_integer(self.MunicipioPrestacao, input_name='MunicipioPrestacao'), namespaceprefix_ , eol_))
+        if self.NumeroEncapsulamento is not None:
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sNumeroEncapsulamento>%s</%sNumeroEncapsulamento>%s' % (namespaceprefix_ , self.gds_format_integer(self.NumeroEncapsulamento, input_name='NumeroEncapsulamento'), namespaceprefix_ , eol_))
+        if self.ValorTotalRecebido is not None:
+            showIndent(outfile, level, pretty_print)
+            outfile.write('<%sValorTotalRecebido>%s</%sValorTotalRecebido>%s' % (namespaceprefix_ , self.gds_format_float(self.ValorTotalRecebido, input_name='ValorTotalRecebido'), namespaceprefix_ , eol_))
+    def build(self, node):
+        already_processed = set()
+        self.buildAttributes(node, node.attrib, already_processed)
+        for child in node:
+            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
+            self.buildChildren(child, node, nodeName_)
+        return self
+    def buildAttributes(self, node, attrs, already_processed):
+        pass
+    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
+        if nodeName_ == 'Assinatura':
+            sval_ = child_.text
+            if sval_ is not None:
+                try:
+                    bval_ = base64.b64decode(sval_)
+                except (TypeError, ValueError) as exp:
+                    raise_parse_error(child_, 'requires base64 encoded string: %s' % exp)
+                bval_ = self.gds_validate_base64(bval_, node, 'Assinatura')
+            else:
+                bval_ = None
+            self.Assinatura = bval_
+            # validate type tpAssinatura
+            self.validate_tpAssinatura(self.Assinatura)
+        elif nodeName_ == 'ChaveRPS':
+            obj_ = tpChaveRPS.factory(parent_object_=self)
+            obj_.build(child_)
+            self.ChaveRPS = obj_
+            obj_.original_tagname_ = 'ChaveRPS'
+        elif nodeName_ == 'TipoRPS':
+            TipoRPS_ = child_.text
+            TipoRPS_ = self.gds_validate_string(TipoRPS_, node, 'TipoRPS')
+            self.TipoRPS = TipoRPS_
+            # validate type tpTipoRPS
+            self.validate_tpTipoRPS(self.TipoRPS)
+        elif nodeName_ == 'DataEmissao':
+            sval_ = child_.text
+            dval_ = self.gds_parse_date(sval_)
+            self.DataEmissao = dval_
+        elif nodeName_ == 'StatusRPS':
+            StatusRPS_ = child_.text
+            StatusRPS_ = self.gds_validate_string(StatusRPS_, node, 'StatusRPS')
+            self.StatusRPS = StatusRPS_
+            # validate type tpStatusNFe
+            self.validate_tpStatusNFe(self.StatusRPS)
+        elif nodeName_ == 'TributacaoRPS':
+            TributacaoRPS_ = child_.text
+            TributacaoRPS_ = self.gds_validate_string(TributacaoRPS_, node, 'TributacaoRPS')
+            self.TributacaoRPS =
TributacaoRPS_ + # validate type tpTributacaoNFe + self.validate_tpTributacaoNFe(self.TributacaoRPS) + elif nodeName_ == 'ValorServicos' and child_.text: + sval_ = child_.text + try: + fval_ = float(sval_) + except (TypeError, ValueError) as exp: + raise_parse_error(child_, 'requires float or double: %s' % exp) + fval_ = self.gds_validate_float(fval_, node, 'ValorServicos') + self.ValorServicos = fval_ + # validate type tpValor + self.validate_tpValor(self.ValorServicos) + elif nodeName_ == 'ValorDeducoes' and child_.text: + sval_ = child_.text + try: + fval_ = float(sval_) + except (TypeError, ValueError) as exp: + raise_parse_error(child_, 'requires float or double: %s' % exp) + fval_ = self.gds_validate_float(fval_, node, 'ValorDeducoes') + self.ValorDeducoes = fval_ + # validate type tpValor + self.validate_tpValor(self.ValorDeducoes) + elif nodeName_ == 'ValorPIS' and child_.text: + sval_ = child_.text + try: + fval_ = float(sval_) + except (TypeError, ValueError) as exp: + raise_parse_error(child_, 'requires float or double: %s' % exp) + fval_ = self.gds_validate_float(fval_, node, 'ValorPIS') + self.ValorPIS = fval_ + # validate type tpValor + self.validate_tpValor(self.ValorPIS) + elif nodeName_ == 'ValorCOFINS' and child_.text: + sval_ = child_.text + try: + fval_ = float(sval_) + except (TypeError, ValueError) as exp: + raise_parse_error(child_, 'requires float or double: %s' % exp) + fval_ = self.gds_validate_float(fval_, node, 'ValorCOFINS') + self.ValorCOFINS = fval_ + # validate type tpValor + self.validate_tpValor(self.ValorCOFINS) + elif nodeName_ == 'ValorINSS' and child_.text: + sval_ = child_.text + try: + fval_ = float(sval_) + except (TypeError, ValueError) as exp: + raise_parse_error(child_, 'requires float or double: %s' % exp) + fval_ = self.gds_validate_float(fval_, node, 'ValorINSS') + self.ValorINSS = fval_ + # validate type tpValor + self.validate_tpValor(self.ValorINSS) + elif nodeName_ == 'ValorIR' and child_.text: + sval_ = child_.text + try: + fval_ = float(sval_) + except (TypeError, ValueError) as exp: + raise_parse_error(child_, 'requires float or double: %s' % exp) + fval_ = self.gds_validate_float(fval_, node, 'ValorIR') + self.ValorIR = fval_ + # validate type tpValor + self.validate_tpValor(self.ValorIR) + elif nodeName_ == 'ValorCSLL' and child_.text: + sval_ = child_.text + try: + fval_ = float(sval_) + except (TypeError, ValueError) as exp: + raise_parse_error(child_, 'requires float or double: %s' % exp) + fval_ = self.gds_validate_float(fval_, node, 'ValorCSLL') + self.ValorCSLL = fval_ + # validate type tpValor + self.validate_tpValor(self.ValorCSLL) + elif nodeName_ == 'CodigoServico' and child_.text: + sval_ = child_.text + try: + ival_ = int(sval_) + except (TypeError, ValueError) as exp: + raise_parse_error(child_, 'requires integer: %s' % exp) + ival_ = self.gds_validate_integer(ival_, node, 'CodigoServico') + self.CodigoServico = ival_ + # validate type tpCodigoServico + self.validate_tpCodigoServico(self.CodigoServico) + elif nodeName_ == 'AliquotaServicos' and child_.text: + sval_ = child_.text + try: + fval_ = float(sval_) + except (TypeError, ValueError) as exp: + raise_parse_error(child_, 'requires float or double: %s' % exp) + fval_ = self.gds_validate_float(fval_, node, 'AliquotaServicos') + self.AliquotaServicos = fval_ + # validate type tpAliquota + self.validate_tpAliquota(self.AliquotaServicos) + elif nodeName_ == 'ISSRetido': + sval_ = child_.text + if sval_ in ('true', '1'): + ival_ = True + elif sval_ in ('false', '0'): + 
ival_ = False + else: + raise_parse_error(child_, 'requires boolean') + ival_ = self.gds_validate_boolean(ival_, node, 'ISSRetido') + self.ISSRetido = ival_ + elif nodeName_ == 'CPFCNPJTomador': + obj_ = tpCPFCNPJ.factory(parent_object_=self) + obj_.build(child_) + self.CPFCNPJTomador = obj_ + obj_.original_tagname_ = 'CPFCNPJTomador' + elif nodeName_ == 'InscricaoMunicipalTomador' and child_.text: + sval_ = child_.text + try: + ival_ = int(sval_) + except (TypeError, ValueError) as exp: + raise_parse_error(child_, 'requires integer: %s' % exp) + ival_ = self.gds_validate_integer(ival_, node, 'InscricaoMunicipalTomador') + self.InscricaoMunicipalTomador = ival_ + # validate type tpInscricaoMunicipal + self.validate_tpInscricaoMunicipal(self.InscricaoMunicipalTomador) + elif nodeName_ == 'InscricaoEstadualTomador' and child_.text: + sval_ = child_.text + try: + ival_ = int(sval_) + except (TypeError, ValueError) as exp: + raise_parse_error(child_, 'requires integer: %s' % exp) + ival_ = self.gds_validate_integer(ival_, node, 'InscricaoEstadualTomador') + self.InscricaoEstadualTomador = ival_ + # validate type tpInscricaoEstadual + self.validate_tpInscricaoEstadual(self.InscricaoEstadualTomador) + elif nodeName_ == 'RazaoSocialTomador': + RazaoSocialTomador_ = child_.text + RazaoSocialTomador_ = self.gds_validate_string(RazaoSocialTomador_, node, 'RazaoSocialTomador') + self.RazaoSocialTomador = RazaoSocialTomador_ + # validate type tpRazaoSocial + self.validate_tpRazaoSocial(self.RazaoSocialTomador) + elif nodeName_ == 'EnderecoTomador': + obj_ = tpEndereco.factory(parent_object_=self) + obj_.build(child_) + self.EnderecoTomador = obj_ + obj_.original_tagname_ = 'EnderecoTomador' + elif nodeName_ == 'EmailTomador': + EmailTomador_ = child_.text + EmailTomador_ = self.gds_validate_string(EmailTomador_, node, 'EmailTomador') + self.EmailTomador = EmailTomador_ + # validate type tpEmail + self.validate_tpEmail(self.EmailTomador) + elif nodeName_ == 'CPFCNPJIntermediario': + obj_ = tpCPFCNPJ.factory(parent_object_=self) + obj_.build(child_) + self.CPFCNPJIntermediario = obj_ + obj_.original_tagname_ = 'CPFCNPJIntermediario' + elif nodeName_ == 'InscricaoMunicipalIntermediario' and child_.text: + sval_ = child_.text + try: + ival_ = int(sval_) + except (TypeError, ValueError) as exp: + raise_parse_error(child_, 'requires integer: %s' % exp) + ival_ = self.gds_validate_integer(ival_, node, 'InscricaoMunicipalIntermediario') + self.InscricaoMunicipalIntermediario = ival_ + # validate type tpInscricaoMunicipal + self.validate_tpInscricaoMunicipal(self.InscricaoMunicipalIntermediario) + elif nodeName_ == 'ISSRetidoIntermediario': + ISSRetidoIntermediario_ = child_.text + ISSRetidoIntermediario_ = self.gds_validate_string(ISSRetidoIntermediario_, node, 'ISSRetidoIntermediario') + self.ISSRetidoIntermediario = ISSRetidoIntermediario_ + elif nodeName_ == 'EmailIntermediario': + EmailIntermediario_ = child_.text + EmailIntermediario_ = self.gds_validate_string(EmailIntermediario_, node, 'EmailIntermediario') + self.EmailIntermediario = EmailIntermediario_ + # validate type tpEmail + self.validate_tpEmail(self.EmailIntermediario) + elif nodeName_ == 'Discriminacao': + Discriminacao_ = child_.text + Discriminacao_ = self.gds_validate_string(Discriminacao_, node, 'Discriminacao') + self.Discriminacao = Discriminacao_ + # validate type tpDiscriminacao + self.validate_tpDiscriminacao(self.Discriminacao) + elif nodeName_ == 'ValorCargaTributaria' and child_.text: + sval_ = child_.text + try: + fval_ = 
float(sval_) + except (TypeError, ValueError) as exp: + raise_parse_error(child_, 'requires float or double: %s' % exp) + fval_ = self.gds_validate_float(fval_, node, 'ValorCargaTributaria') + self.ValorCargaTributaria = fval_ + # validate type tpValor + self.validate_tpValor(self.ValorCargaTributaria) + elif nodeName_ == 'PercentualCargaTributaria' and child_.text: + sval_ = child_.text + try: + fval_ = float(sval_) + except (TypeError, ValueError) as exp: + raise_parse_error(child_, 'requires float or double: %s' % exp) + fval_ = self.gds_validate_float(fval_, node, 'PercentualCargaTributaria') + self.PercentualCargaTributaria = fval_ + # validate type tpPercentualCargaTributaria + self.validate_tpPercentualCargaTributaria(self.PercentualCargaTributaria) + elif nodeName_ == 'FonteCargaTributaria': + FonteCargaTributaria_ = child_.text + FonteCargaTributaria_ = self.gds_validate_string(FonteCargaTributaria_, node, 'FonteCargaTributaria') + self.FonteCargaTributaria = FonteCargaTributaria_ + # validate type tpFonteCargaTributaria + self.validate_tpFonteCargaTributaria(self.FonteCargaTributaria) + elif nodeName_ == 'CodigoCEI' and child_.text: + sval_ = child_.text + try: + ival_ = int(sval_) + except (TypeError, ValueError) as exp: + raise_parse_error(child_, 'requires integer: %s' % exp) + ival_ = self.gds_validate_integer(ival_, node, 'CodigoCEI') + self.CodigoCEI = ival_ + # validate type tpNumero + self.validate_tpNumero(self.CodigoCEI) + elif nodeName_ == 'MatriculaObra' and child_.text: + sval_ = child_.text + try: + ival_ = int(sval_) + except (TypeError, ValueError) as exp: + raise_parse_error(child_, 'requires integer: %s' % exp) + ival_ = self.gds_validate_integer(ival_, node, 'MatriculaObra') + self.MatriculaObra = ival_ + # validate type tpNumero + self.validate_tpNumero(self.MatriculaObra) + elif nodeName_ == 'MunicipioPrestacao' and child_.text: + sval_ = child_.text + try: + ival_ = int(sval_) + except (TypeError, ValueError) as exp: + raise_parse_error(child_, 'requires integer: %s' % exp) + ival_ = self.gds_validate_integer(ival_, node, 'MunicipioPrestacao') + self.MunicipioPrestacao = ival_ + # validate type tpCidade + self.validate_tpCidade(self.MunicipioPrestacao) + elif nodeName_ == 'NumeroEncapsulamento' and child_.text: + sval_ = child_.text + try: + ival_ = int(sval_) + except (TypeError, ValueError) as exp: + raise_parse_error(child_, 'requires integer: %s' % exp) + ival_ = self.gds_validate_integer(ival_, node, 'NumeroEncapsulamento') + self.NumeroEncapsulamento = ival_ + # validate type tpNumero + self.validate_tpNumero(self.NumeroEncapsulamento) + elif nodeName_ == 'ValorTotalRecebido' and child_.text: + sval_ = child_.text + try: + fval_ = float(sval_) + except (TypeError, ValueError) as exp: + raise_parse_error(child_, 'requires float or double: %s' % exp) + fval_ = self.gds_validate_float(fval_, node, 'ValorTotalRecebido') + self.ValorTotalRecebido = fval_ + # validate type tpValor + self.validate_tpValor(self.ValorTotalRecebido) +# end class tpRPS + + +GDSClassesMapping = { +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + rootClass = GDSClassesMapping.get(tag) + if rootClass is None: + rootClass = globals().get(tag) + return tag, rootClass + + +def parse(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass 
is None: + rootTag = 'tpEvento' + rootClass = tpEvento + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + doc = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='xmlns:tipos="http://www.prefeitura.sp.gov.br/nfe/tipos"', + pretty_print=True) + return rootObj + + +def parseEtree(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'tpEvento' + rootClass = tpEvento + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + doc = None + mapping = {} + rootElement = rootObj.to_etree(None, name_=rootTag, mapping_=mapping) + reverse_mapping = rootObj.gds_reverse_node_mapping(mapping) + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(content) + sys.stdout.write('\n') + return rootObj, rootElement, mapping, reverse_mapping + + +def parseString(inString, silence=False): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. + ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'tpEvento' + rootClass = tpEvento + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='xmlns:tipos="http://www.prefeitura.sp.gov.br/nfe/tipos"') + return rootObj + + +def parseLiteral(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'tpEvento' + rootClass = tpEvento + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + doc = None + if not silence: + sys.stdout.write('#from TiposNFe_v01 import *\n\n') + sys.stdout.write('import TiposNFe_v01 as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + + +__all__ = [ + "tpCPFCNPJ", + "tpChaveNFe", + "tpChaveNFeRPS", + "tpChaveRPS", + "tpEndereco", + "tpEvento", + "tpInformacoesLote", + "tpNFe", + "tpRPS" +] diff --git a/nfselib/paulistana/v02/__init__.py b/nfselib/paulistana/v02/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/nfselib/paulistana/v02/xmldsig-core-schema_v01.py b/nfselib/paulistana/v02/xmldsig-core-schema_v01.py new file mode 100644 index 0000000..e823ca9 --- /dev/null +++ b/nfselib/paulistana/v02/xmldsig-core-schema_v01.py @@ -0,0 +1,2009 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# +# Generated Wed Jul 22 06:40:30 2020 by generateDS.py version 2.30.15. 
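A minimal usage sketch of the TiposNFe_v01 API generated above (factory(), plain attribute assignment, export()); the field values are invented and the snippet assumes the generated nfselib package imports cleanly:

import sys
import datetime

from nfselib.paulistana.v02 import TiposNFe_v01 as tipos

# Build an RPS object directly; the field names come from the generated tpRPS class.
rps = tipos.tpRPS.factory()
rps.TipoRPS = 'RPS'                          # tpTipoRPS: 'RPS', 'RPS-M' or 'RPS-C'
rps.DataEmissao = datetime.date(2020, 7, 22)
rps.StatusRPS = 'N'                          # tpStatusNFe: 'N', 'C' or 'E'
rps.ValorServicos = 100.0
rps.AliquotaServicos = 0.05
rps.ISSRetido = False
rps.Discriminacao = 'Servico de exemplo'     # invented sample value

# Serialize the object back to XML on stdout, pretty-printed.
rps.export(sys.stdout, 0, name_='RPS', pretty_print=True)

The parseString()/parse() helpers above go the other way, rebuilding the same objects from XML text.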
+# Python 3.8.3 (default, May 17 2020, 18:15:42) [GCC 10.1.0] +# +# Command line options: +# ('--no-namespace-defs', '') +# ('--no-collect-includes', '') +# ('--use-getter-setter', 'none') +# ('-f', '') +# ('-o', './paulistanalib/v02/xmldsig-core-schema_v01.py') +# +# Command line arguments: +# ./process_includes/xmldsig-core-schema_v01.xsd +# +# Command line: +# /home/mileo/Projects/oca12/bin/erpbrasil-edoc-gen-generate-python --no-namespace-defs --no-collect-includes --use-getter-setter="none" -f -o "./paulistanalib/v02/xmldsig-core-schema_v01.py" ./process_includes/xmldsig-core-schema_v01.xsd +# +# Current working directory (os.getcwd()): +# schemas +# + +import os +import sys +import re as re_ +import base64 +import datetime as datetime_ +import warnings as warnings_ +try: + from lxml import etree as etree_ +except ImportError: + from xml.etree import ElementTree as etree_ + + +Validate_simpletypes_ = True +if sys.version_info.major == 2: + BaseStrType_ = basestring +else: + BaseStrType_ = str + + +def parsexml_(infile, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + try: + if isinstance(infile, os.PathLike): + infile = os.path.join(infile) + except AttributeError: + pass + doc = etree_.parse(infile, parser=parser, **kwargs) + return doc + +def parsexmlstring_(instring, parser=None, **kwargs): + if parser is None: + # Use the lxml ElementTree compatible parser so that, e.g., + # we ignore comments. + try: + parser = etree_.ETCompatXMLParser() + except AttributeError: + # fallback to xml.etree + parser = etree_.XMLParser() + element = etree_.fromstring(instring, parser=parser, **kwargs) + return element + +# +# Namespace prefix definition table (and other attributes, too) +# +# The module generatedsnamespaces, if it is importable, must contain +# a dictionary named GeneratedsNamespaceDefs. This Python dictionary +# should map element type names (strings) to XML schema namespace prefix +# definitions. The export method for any class for which there is +# a namespace prefix definition, will export that definition in the +# XML representation of that element. See the export method of +# any generated element type class for an example of the use of this +# table. +# A sample table is: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceDefs = { +# "ElementtypeA": "http://www.xxx.com/namespaceA", +# "ElementtypeB": "http://www.xxx.com/namespaceB", +# } +# +# Additionally, the generatedsnamespaces module can contain a python +# dictionary named GenerateDSNamespaceTypePrefixes that associates element +# types with the namespace prefixes that are to be added to the +# "xsi:type" attribute value. See the exportAttributes method of +# any generated element type and the generation of "xsi:type" for an +# example of the use of this table. 
+# An example table: +# +# # File: generatedsnamespaces.py +# +# GenerateDSNamespaceTypePrefixes = { +# "ElementtypeC": "aaa:", +# "ElementtypeD": "bbb:", +# } +# + +try: + from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ +except ImportError: + GenerateDSNamespaceDefs_ = {} +try: + from generatedsnamespaces import GenerateDSNamespaceTypePrefixes as GenerateDSNamespaceTypePrefixes_ +except ImportError: + GenerateDSNamespaceTypePrefixes_ = {} + +# +# The root super-class for element type classes +# +# Calls to the methods in these classes are generated by generateDS.py. +# You can replace these methods by re-implementing the following class +# in a module named generatedssuper.py. + +try: + from generatedssuper import GeneratedsSuper +except ImportError as exp: + + class GeneratedsSuper(object): + tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$') + class _FixedOffsetTZ(datetime_.tzinfo): + def __init__(self, offset, name): + self.__offset = datetime_.timedelta(minutes=offset) + self.__name = name + def utcoffset(self, dt): + return self.__offset + def tzname(self, dt): + return self.__name + def dst(self, dt): + return None + def gds_format_string(self, input_data, input_name=''): + return input_data + def gds_validate_string(self, input_data, node=None, input_name=''): + if not input_data: + return '' + else: + return input_data + def gds_format_base64(self, input_data, input_name=''): + return base64.b64encode(input_data) + def gds_validate_base64(self, input_data, node=None, input_name=''): + return input_data + def gds_format_integer(self, input_data, input_name=''): + return '%d' % input_data + def gds_validate_integer(self, input_data, node=None, input_name=''): + return input_data + def gds_format_integer_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_integer_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + int(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of integers') + return values + def gds_format_float(self, input_data, input_name=''): + return ('%.15f' % input_data).rstrip('0') + def gds_validate_float(self, input_data, node=None, input_name=''): + return input_data + def gds_format_float_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_float_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of floats') + return values + def gds_format_double(self, input_data, input_name=''): + return '%e' % input_data + def gds_validate_double(self, input_data, node=None, input_name=''): + return input_data + def gds_format_double_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_double_list( + self, input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + try: + float(value) + except (TypeError, ValueError): + raise_parse_error(node, 'Requires sequence of doubles') + return values + def gds_format_boolean(self, input_data, input_name=''): + return ('%s' % input_data).lower() + def gds_validate_boolean(self, input_data, node=None, input_name=''): + return input_data + def gds_format_boolean_list(self, input_data, input_name=''): + return '%s' % ' '.join(input_data) + def gds_validate_boolean_list( + self, 
input_data, node=None, input_name=''): + values = input_data.split() + for value in values: + if value not in ('true', '1', 'false', '0', ): + raise_parse_error( + node, + 'Requires sequence of booleans ' + '("true", "1", "false", "0")') + return values + def gds_validate_datetime(self, input_data, node=None, input_name=''): + return input_data + def gds_format_datetime(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % ( + input_data.year, + input_data.month, + input_data.day, + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + @classmethod + def gds_parse_datetime(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + time_parts = input_data.split('.') + if len(time_parts) > 1: + micro_seconds = int(float('0.' 
+ time_parts[1]) * 1000000) + input_data = '%s.%s' % ( + time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), ) + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime( + input_data, '%Y-%m-%dT%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt + def gds_validate_date(self, input_data, node=None, input_name=''): + return input_data + def gds_format_date(self, input_data, input_name=''): + _svalue = '%04d-%02d-%02d' % ( + input_data.year, + input_data.month, + input_data.day, + ) + try: + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format( + hours, minutes) + except AttributeError: + pass + return _svalue + @classmethod + def gds_parse_date(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d') + dt = dt.replace(tzinfo=tz) + return dt.date() + def gds_validate_time(self, input_data, node=None, input_name=''): + return input_data + def gds_format_time(self, input_data, input_name=''): + if input_data.microsecond == 0: + _svalue = '%02d:%02d:%02d' % ( + input_data.hour, + input_data.minute, + input_data.second, + ) + else: + _svalue = '%02d:%02d:%02d.%s' % ( + input_data.hour, + input_data.minute, + input_data.second, + ('%f' % (float(input_data.microsecond) / 1000000))[2:], + ) + if input_data.tzinfo is not None: + tzoff = input_data.tzinfo.utcoffset(input_data) + if tzoff is not None: + total_seconds = tzoff.seconds + (86400 * tzoff.days) + if total_seconds == 0: + _svalue += 'Z' + else: + if total_seconds < 0: + _svalue += '-' + total_seconds *= -1 + else: + _svalue += '+' + hours = total_seconds // 3600 + minutes = (total_seconds - (hours * 3600)) // 60 + _svalue += '{0:02d}:{1:02d}'.format(hours, minutes) + return _svalue + def gds_validate_simple_patterns(self, patterns, target): + # pat is a list of lists of strings/patterns. + # The target value must match at least one of the patterns + # in order for the test to succeed. 
+ found1 = True + for patterns1 in patterns: + found2 = False + for patterns2 in patterns1: + mo = re_.search(patterns2, target) + if mo is not None and len(mo.group(0)) == len(target): + found2 = True + break + if not found2: + found1 = False + break + return found1 + @classmethod + def gds_parse_time(cls, input_data): + tz = None + if input_data[-1] == 'Z': + tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC') + input_data = input_data[:-1] + else: + results = GeneratedsSuper.tzoff_pattern.search(input_data) + if results is not None: + tzoff_parts = results.group(2).split(':') + tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1]) + if results.group(1) == '-': + tzoff *= -1 + tz = GeneratedsSuper._FixedOffsetTZ( + tzoff, results.group(0)) + input_data = input_data[:-6] + if len(input_data.split('.')) > 1: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f') + else: + dt = datetime_.datetime.strptime(input_data, '%H:%M:%S') + dt = dt.replace(tzinfo=tz) + return dt.time() + def gds_str_lower(self, instring): + return instring.lower() + def get_path_(self, node): + path_list = [] + self.get_path_list_(node, path_list) + path_list.reverse() + path = '/'.join(path_list) + return path + Tag_strip_pattern_ = re_.compile(r'\{.*\}') + def get_path_list_(self, node, path_list): + if node is None: + return + tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag) + if tag: + path_list.append(tag) + self.get_path_list_(node.getparent(), path_list) + def get_class_obj_(self, node, default_class=None): + class_obj1 = default_class + if 'xsi' in node.nsmap: + classname = node.get('{%s}type' % node.nsmap['xsi']) + if classname is not None: + names = classname.split(':') + if len(names) == 2: + classname = names[1] + class_obj2 = globals().get(classname) + if class_obj2 is not None: + class_obj1 = class_obj2 + return class_obj1 + def gds_build_any(self, node, type_name=None): + return None + @classmethod + def gds_reverse_node_mapping(cls, mapping): + return dict(((v, k) for k, v in mapping.items())) + @staticmethod + def gds_encode(instring): + if sys.version_info.major == 2: + if ExternalEncoding: + encoding = ExternalEncoding + else: + encoding = 'utf-8' + return instring.encode(encoding) + else: + return instring + @staticmethod + def convert_unicode(instring): + if isinstance(instring, str): + result = quote_xml(instring) + elif sys.version_info.major == 2 and isinstance(instring, unicode): + result = quote_xml(instring).encode('utf8') + else: + result = GeneratedsSuper.gds_encode(str(instring)) + return result + def __eq__(self, other): + if type(self) != type(other): + return False + return self.__dict__ == other.__dict__ + def __ne__(self, other): + return not self.__eq__(other) + + def getSubclassFromModule_(module, class_): + '''Get the subclass of a class from a specific module.''' + name = class_.__name__ + 'Sub' + if hasattr(module, name): + return getattr(module, name) + else: + return None + + +# +# If you have installed IPython you can uncomment and use the following. +# IPython is available from http://ipython.scipy.org/. 
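A tiny illustration of the gds_validate_simple_patterns helper defined above: 'patterns' is a list of groups, and the target must fully match at least one regex in every group (an AND across groups, an OR within a group). The groups below are hypothetical, and the call assumes the fallback GeneratedsSuper class defined above is in scope:

# Hypothetical groups: accept an 11-digit or a 14-digit identifier.
helper = GeneratedsSuper()
groups = [[r'\d{11}', r'\d{14}']]
print(helper.gds_validate_simple_patterns(groups, '12345678901'))  # True
print(helper.gds_validate_simple_patterns(groups, '123'))          # False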
+# + +## from IPython.Shell import IPShellEmbed +## args = '' +## ipshell = IPShellEmbed(args, +## banner = 'Dropping into IPython', +## exit_msg = 'Leaving Interpreter, back to program.') + +# Then use the following line where and when you want to drop into the +# IPython shell: +# ipshell(' -- Entering ipshell.\nHit Ctrl-D to exit') + +# +# Globals +# + +ExternalEncoding = '' +Tag_pattern_ = re_.compile(r'({.*})?(.*)') +String_cleanup_pat_ = re_.compile(r"[\n\r\s]+") +Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)') +CDATA_pattern_ = re_.compile(r"", re_.DOTALL) + +# Change this to redirect the generated superclass module to use a +# specific subclass module. +CurrentSubclassModule_ = None + +# +# Support/utility functions. +# + + +def showIndent(outfile, level, pretty_print=True): + if pretty_print: + for idx in range(level): + outfile.write(' ') + + +def quote_xml(inStr): + "Escape markup chars, but do not modify CDATA sections." + if not inStr: + return '' + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s2 = '' + pos = 0 + matchobjects = CDATA_pattern_.finditer(s1) + for mo in matchobjects: + s3 = s1[pos:mo.start()] + s2 += quote_xml_aux(s3) + s2 += s1[mo.start():mo.end()] + pos = mo.end() + s3 = s1[pos:] + s2 += quote_xml_aux(s3) + return s2 + + +def quote_xml_aux(inStr): + s1 = inStr.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + return s1 + + +def quote_attrib(inStr): + s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr) + s1 = s1.replace('&', '&') + s1 = s1.replace('<', '<') + s1 = s1.replace('>', '>') + if '"' in s1: + if "'" in s1: + s1 = '"%s"' % s1.replace('"', """) + else: + s1 = "'%s'" % s1 + else: + s1 = '"%s"' % s1 + return s1 + + +def quote_python(inStr): + s1 = inStr + if s1.find("'") == -1: + if s1.find('\n') == -1: + return "'%s'" % s1 + else: + return "'''%s'''" % s1 + else: + if s1.find('"') != -1: + s1 = s1.replace('"', '\\"') + if s1.find('\n') == -1: + return '"%s"' % s1 + else: + return '"""%s"""' % s1 + + +def get_all_text_(node): + if node.text is not None: + text = node.text + else: + text = '' + for child in node: + if child.tail is not None: + text += child.tail + return text + + +def find_attr_value_(attr_name, node): + attrs = node.attrib + attr_parts = attr_name.split(':') + value = None + if len(attr_parts) == 1: + value = attrs.get(attr_name) + elif len(attr_parts) == 2: + prefix, name = attr_parts + namespace = node.nsmap.get(prefix) + if namespace is not None: + value = attrs.get('{%s}%s' % (namespace, name, )) + return value + + +class GDSParseError(Exception): + pass + + +def raise_parse_error(node, msg): + msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, ) + raise GDSParseError(msg) + + +class MixedContainer: + # Constants for category: + CategoryNone = 0 + CategoryText = 1 + CategorySimple = 2 + CategoryComplex = 3 + # Constants for content_type: + TypeNone = 0 + TypeText = 1 + TypeString = 2 + TypeInteger = 3 + TypeFloat = 4 + TypeDecimal = 5 + TypeDouble = 6 + TypeBoolean = 7 + TypeBase64 = 8 + def __init__(self, category, content_type, name, value): + self.category = category + self.content_type = content_type + self.name = name + self.value = value + def getCategory(self): + return self.category + def getContenttype(self, content_type): + return self.content_type + def getValue(self): + return self.value + def getName(self): + return self.name + def export(self, outfile, level, name, namespace, + pretty_print=True): + if self.category == 
MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + outfile.write(self.value) + elif self.category == MixedContainer.CategorySimple: + self.exportSimple(outfile, level, name) + else: # category == MixedContainer.CategoryComplex + self.value.export( + outfile, level, namespace, name_=name, + pretty_print=pretty_print) + def exportSimple(self, outfile, level, name): + if self.content_type == MixedContainer.TypeString: + outfile.write('<%s>%s' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeInteger or \ + self.content_type == MixedContainer.TypeBoolean: + outfile.write('<%s>%d' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeFloat or \ + self.content_type == MixedContainer.TypeDecimal: + outfile.write('<%s>%f' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeDouble: + outfile.write('<%s>%g' % ( + self.name, self.value, self.name)) + elif self.content_type == MixedContainer.TypeBase64: + outfile.write('<%s>%s' % ( + self.name, + base64.b64encode(self.value), + self.name)) + def to_etree(self, element): + if self.category == MixedContainer.CategoryText: + # Prevent exporting empty content as empty lines. + if self.value.strip(): + if len(element) > 0: + if element[-1].tail is None: + element[-1].tail = self.value + else: + element[-1].tail += self.value + else: + if element.text is None: + element.text = self.value + else: + element.text += self.value + elif self.category == MixedContainer.CategorySimple: + subelement = etree_.SubElement( + element, '%s' % self.name) + subelement.text = self.to_etree_simple() + else: # category == MixedContainer.CategoryComplex + self.value.to_etree(element) + def to_etree_simple(self): + if self.content_type == MixedContainer.TypeString: + text = self.value + elif (self.content_type == MixedContainer.TypeInteger or + self.content_type == MixedContainer.TypeBoolean): + text = '%d' % self.value + elif (self.content_type == MixedContainer.TypeFloat or + self.content_type == MixedContainer.TypeDecimal): + text = '%f' % self.value + elif self.content_type == MixedContainer.TypeDouble: + text = '%g' % self.value + elif self.content_type == MixedContainer.TypeBase64: + text = '%s' % base64.b64encode(self.value) + return text + def exportLiteral(self, outfile, level, name): + if self.category == MixedContainer.CategoryText: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + elif self.category == MixedContainer.CategorySimple: + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % ( + self.category, self.content_type, + self.name, self.value)) + else: # category == MixedContainer.CategoryComplex + showIndent(outfile, level) + outfile.write( + 'model_.MixedContainer(%d, %d, "%s",\n' % ( + self.category, self.content_type, self.name,)) + self.value.exportLiteral(outfile, level + 1) + showIndent(outfile, level) + outfile.write(')\n') + + +class MemberSpec_(object): + def __init__(self, name='', data_type='', container=0, + optional=0, child_attrs=None, choice=None): + self.name = name + self.data_type = data_type + self.container = container + self.child_attrs = child_attrs + self.choice = choice + self.optional = optional + def set_name(self, name): self.name = name + def get_name(self): return self.name + def set_data_type(self, data_type): 
self.data_type = data_type + def get_data_type_chain(self): return self.data_type + def get_data_type(self): + if isinstance(self.data_type, list): + if len(self.data_type) > 0: + return self.data_type[-1] + else: + return 'xs:string' + else: + return self.data_type + def set_container(self, container): self.container = container + def get_container(self): return self.container + def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs + def get_child_attrs(self): return self.child_attrs + def set_choice(self, choice): self.choice = choice + def get_choice(self): return self.choice + def set_optional(self, optional): self.optional = optional + def get_optional(self): return self.optional + + +def _cast(typ, value): + if typ is None or value is None: + return value + return typ(value) + +# +# Data representation classes. +# + + +class tpOpcaoSimples(object): + _0='0' + _1='1' + _2='2' + _3='3' + + +class tpStatusNFe(object): + N='N' + C='C' + E='E' + + +class tpTipoRPS(object): + RPS='RPS' + RPSM='RPS-M' + RPSC='RPS-C' + + +class SignatureType(GeneratedsSuper): + subclass = None + superclass = None + def __init__(self, Id=None, SignedInfo=None, SignatureValue=None, KeyInfo=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.Id = _cast(None, Id) + self.SignedInfo = SignedInfo + self.SignatureValue = SignatureValue + self.KeyInfo = KeyInfo + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, SignatureType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if SignatureType.subclass: + return SignatureType.subclass(*args_, **kwargs_) + else: + return SignatureType(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): + if ( + self.SignedInfo is not None or + self.SignatureValue is not None or + self.KeyInfo is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SignatureType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('SignatureType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SignatureType') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SignatureType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SignatureType'): + if self.Id is not None and 'Id' not in already_processed: + already_processed.add('Id') + outfile.write(' Id=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.Id), input_name='Id')), )) + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SignatureType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.SignedInfo is not None: + self.SignedInfo.export(outfile, level, 
namespaceprefix_, namespacedef_='', name_='SignedInfo', pretty_print=pretty_print) + if self.SignatureValue is not None: + self.SignatureValue.export(outfile, level, namespaceprefix_, namespacedef_='', name_='SignatureValue', pretty_print=pretty_print) + if self.KeyInfo is not None: + self.KeyInfo.export(outfile, level, namespaceprefix_, namespacedef_='', name_='KeyInfo', pretty_print=pretty_print) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('Id', node) + if value is not None and 'Id' not in already_processed: + already_processed.add('Id') + self.Id = value + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'SignedInfo': + obj_ = SignedInfoType.factory(parent_object_=self) + obj_.build(child_) + self.SignedInfo = obj_ + obj_.original_tagname_ = 'SignedInfo' + elif nodeName_ == 'SignatureValue': + obj_ = SignatureValueType.factory(parent_object_=self) + obj_.build(child_) + self.SignatureValue = obj_ + obj_.original_tagname_ = 'SignatureValue' + elif nodeName_ == 'KeyInfo': + obj_ = KeyInfoType.factory(parent_object_=self) + obj_.build(child_) + self.KeyInfo = obj_ + obj_.original_tagname_ = 'KeyInfo' +# end class SignatureType + + +class SignatureValueType(GeneratedsSuper): + subclass = None + superclass = None + def __init__(self, Id=None, valueOf_=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.Id = _cast(None, Id) + self.valueOf_ = valueOf_ + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, SignatureValueType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if SignatureValueType.subclass: + return SignatureValueType.subclass(*args_, **kwargs_) + else: + return SignatureValueType(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): + if ( + (1 if type(self.valueOf_) in [int,float] else self.valueOf_) + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SignatureValueType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('SignatureValueType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SignatureValueType') + if self.hasContent_(): + outfile.write('>') + outfile.write(self.convert_unicode(self.valueOf_)) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SignatureValueType', pretty_print=pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SignatureValueType'): + if self.Id is not None and 'Id' not in already_processed: + already_processed.add('Id') + outfile.write(' Id=%s' % 
(self.gds_encode(self.gds_format_string(quote_attrib(self.Id), input_name='Id')), )) + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SignatureValueType', fromsubclass_=False, pretty_print=True): + pass + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + self.valueOf_ = get_all_text_(node) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('Id', node) + if value is not None and 'Id' not in already_processed: + already_processed.add('Id') + self.Id = value + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + pass +# end class SignatureValueType + + +class SignedInfoType(GeneratedsSuper): + subclass = None + superclass = None + def __init__(self, Id=None, CanonicalizationMethod=None, SignatureMethod=None, Reference=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.Id = _cast(None, Id) + self.CanonicalizationMethod = CanonicalizationMethod + self.SignatureMethod = SignatureMethod + if Reference is None: + self.Reference = [] + else: + self.Reference = Reference + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, SignedInfoType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if SignedInfoType.subclass: + return SignedInfoType.subclass(*args_, **kwargs_) + else: + return SignedInfoType(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): + if ( + self.CanonicalizationMethod is not None or + self.SignatureMethod is not None or + self.Reference + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SignedInfoType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('SignedInfoType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SignedInfoType') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SignedInfoType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SignedInfoType'): + if self.Id is not None and 'Id' not in already_processed: + already_processed.add('Id') + outfile.write(' Id=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.Id), input_name='Id')), )) + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SignedInfoType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.CanonicalizationMethod is not None: + self.CanonicalizationMethod.export(outfile, level, namespaceprefix_, namespacedef_='', 
name_='CanonicalizationMethod', pretty_print=pretty_print) + if self.SignatureMethod is not None: + self.SignatureMethod.export(outfile, level, namespaceprefix_, namespacedef_='', name_='SignatureMethod', pretty_print=pretty_print) + for Reference_ in self.Reference: + Reference_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Reference', pretty_print=pretty_print) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('Id', node) + if value is not None and 'Id' not in already_processed: + already_processed.add('Id') + self.Id = value + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'CanonicalizationMethod': + obj_ = CanonicalizationMethodType.factory(parent_object_=self) + obj_.build(child_) + self.CanonicalizationMethod = obj_ + obj_.original_tagname_ = 'CanonicalizationMethod' + elif nodeName_ == 'SignatureMethod': + obj_ = SignatureMethodType.factory(parent_object_=self) + obj_.build(child_) + self.SignatureMethod = obj_ + obj_.original_tagname_ = 'SignatureMethod' + elif nodeName_ == 'Reference': + obj_ = ReferenceType.factory(parent_object_=self) + obj_.build(child_) + self.Reference.append(obj_) + obj_.original_tagname_ = 'Reference' +# end class SignedInfoType + + +class ReferenceType(GeneratedsSuper): + subclass = None + superclass = None + def __init__(self, Id=None, URI=None, Type=None, Transforms=None, DigestMethod=None, DigestValue=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.Id = _cast(None, Id) + self.URI = _cast(None, URI) + self.Type = _cast(None, Type) + self.Transforms = Transforms + self.DigestMethod = DigestMethod + self.DigestValue = DigestValue + self.validate_DigestValueType(self.DigestValue) + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, ReferenceType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if ReferenceType.subclass: + return ReferenceType.subclass(*args_, **kwargs_) + else: + return ReferenceType(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_DigestValueType(self, value): + # Validate type DigestValueType, a restriction on xs:base64Binary. 
+ if value is not None and Validate_simpletypes_: + pass + def hasContent_(self): + if ( + self.Transforms is not None or + self.DigestMethod is not None or + self.DigestValue is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ReferenceType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('ReferenceType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ReferenceType') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='ReferenceType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ReferenceType'): + if self.Id is not None and 'Id' not in already_processed: + already_processed.add('Id') + outfile.write(' Id=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.Id), input_name='Id')), )) + if self.URI is not None and 'URI' not in already_processed: + already_processed.add('URI') + outfile.write(' URI=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.URI), input_name='URI')), )) + if self.Type is not None and 'Type' not in already_processed: + already_processed.add('Type') + outfile.write(' Type=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.Type), input_name='Type')), )) + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='ReferenceType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Transforms is not None: + self.Transforms.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Transforms', pretty_print=pretty_print) + if self.DigestMethod is not None: + self.DigestMethod.export(outfile, level, namespaceprefix_, namespacedef_='', name_='DigestMethod', pretty_print=pretty_print) + if self.DigestValue is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sDigestValue>%s%s' % (namespaceprefix_ , self.gds_format_base64(self.DigestValue, input_name='DigestValue'), namespaceprefix_ , eol_)) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('Id', node) + if value is not None and 'Id' not in already_processed: + already_processed.add('Id') + self.Id = value + value = find_attr_value_('URI', node) + if value is not None and 'URI' not in already_processed: + already_processed.add('URI') + self.URI = value + value = find_attr_value_('Type', node) + if value is not None and 'Type' not in already_processed: + already_processed.add('Type') + self.Type = value + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 
'Transforms': + obj_ = TransformsType.factory(parent_object_=self) + obj_.build(child_) + self.Transforms = obj_ + obj_.original_tagname_ = 'Transforms' + elif nodeName_ == 'DigestMethod': + obj_ = DigestMethodType.factory(parent_object_=self) + obj_.build(child_) + self.DigestMethod = obj_ + obj_.original_tagname_ = 'DigestMethod' + elif nodeName_ == 'DigestValue': + sval_ = child_.text + if sval_ is not None: + try: + bval_ = base64.b64decode(sval_) + except (TypeError, ValueError) as exp: + raise_parse_error(child_, 'requires base64 encoded string: %s' % exp) + bval_ = self.gds_validate_base64(bval_, node, 'DigestValue') + else: + bval_ = None + self.DigestValue = bval_ + # validate type DigestValueType + self.validate_DigestValueType(self.DigestValue) +# end class ReferenceType + + +class TransformsType(GeneratedsSuper): + subclass = None + superclass = None + def __init__(self, Transform=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + if Transform is None: + self.Transform = [] + else: + self.Transform = Transform + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, TransformsType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if TransformsType.subclass: + return TransformsType.subclass(*args_, **kwargs_) + else: + return TransformsType(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): + if ( + self.Transform + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='TransformsType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('TransformsType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='TransformsType') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='TransformsType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='TransformsType'): + pass + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='TransformsType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + for Transform_ in self.Transform: + Transform_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Transform', pretty_print=pretty_print) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + pass + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'Transform': + obj_ = TransformType.factory(parent_object_=self) + obj_.build(child_) + self.Transform.append(obj_) + 
obj_.original_tagname_ = 'Transform' +# end class TransformsType + + +class TransformType(GeneratedsSuper): + subclass = None + superclass = None + def __init__(self, Algorithm=None, XPath=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.Algorithm = _cast(None, Algorithm) + if XPath is None: + self.XPath = [] + else: + self.XPath = XPath + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, TransformType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if TransformType.subclass: + return TransformType.subclass(*args_, **kwargs_) + else: + return TransformType(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): + if ( + self.XPath + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='TransformType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('TransformType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='TransformType') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='TransformType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='TransformType'): + if self.Algorithm is not None and 'Algorithm' not in already_processed: + already_processed.add('Algorithm') + outfile.write(' Algorithm=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.Algorithm), input_name='Algorithm')), )) + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='TransformType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + for XPath_ in self.XPath: + showIndent(outfile, level, pretty_print) + outfile.write('<%sXPath>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(XPath_), input_name='XPath')), namespaceprefix_ , eol_)) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('Algorithm', node) + if value is not None and 'Algorithm' not in already_processed: + already_processed.add('Algorithm') + self.Algorithm = value + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'XPath': + XPath_ = child_.text + XPath_ = self.gds_validate_string(XPath_, node, 'XPath') + self.XPath.append(XPath_) +# end class TransformType + + +class KeyInfoType(GeneratedsSuper): + subclass = None + superclass = None + def __init__(self, Id=None, X509Data=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = 
kwargs_.get('parent_object_') + self.Id = _cast(None, Id) + self.X509Data = X509Data + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, KeyInfoType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if KeyInfoType.subclass: + return KeyInfoType.subclass(*args_, **kwargs_) + else: + return KeyInfoType(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): + if ( + self.X509Data is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='KeyInfoType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('KeyInfoType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='KeyInfoType') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='KeyInfoType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='KeyInfoType'): + if self.Id is not None and 'Id' not in already_processed: + already_processed.add('Id') + outfile.write(' Id=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.Id), input_name='Id')), )) + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='KeyInfoType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.X509Data is not None: + self.X509Data.export(outfile, level, namespaceprefix_, namespacedef_='', name_='X509Data', pretty_print=pretty_print) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('Id', node) + if value is not None and 'Id' not in already_processed: + already_processed.add('Id') + self.Id = value + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'X509Data': + obj_ = X509DataType.factory(parent_object_=self) + obj_.build(child_) + self.X509Data = obj_ + obj_.original_tagname_ = 'X509Data' +# end class KeyInfoType + + +class KeyValueType(GeneratedsSuper): + subclass = None + superclass = None + def __init__(self, RSAKeyValue=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.RSAKeyValue = RSAKeyValue + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, KeyValueType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if KeyValueType.subclass: + return KeyValueType.subclass(*args_, **kwargs_) + else: + return KeyValueType(*args_, **kwargs_) + factory = staticmethod(factory) + def 
hasContent_(self): + if ( + self.RSAKeyValue is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='KeyValueType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('KeyValueType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='KeyValueType') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='KeyValueType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='KeyValueType'): + pass + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='KeyValueType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.RSAKeyValue is not None: + self.RSAKeyValue.export(outfile, level, namespaceprefix_, namespacedef_='', name_='RSAKeyValue', pretty_print=pretty_print) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + pass + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'RSAKeyValue': + obj_ = RSAKeyValueType.factory(parent_object_=self) + obj_.build(child_) + self.RSAKeyValue = obj_ + obj_.original_tagname_ = 'RSAKeyValue' +# end class KeyValueType + + +class X509DataType(GeneratedsSuper): + subclass = None + superclass = None + def __init__(self, X509Certificate=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.X509Certificate = X509Certificate + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, X509DataType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if X509DataType.subclass: + return X509DataType.subclass(*args_, **kwargs_) + else: + return X509DataType(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): + if ( + self.X509Certificate is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='X509DataType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('X509DataType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='X509DataType') + if 
self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='X509DataType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='X509DataType'): + pass + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='X509DataType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.X509Certificate is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sX509Certificate>%s%s' % (namespaceprefix_ , self.gds_format_base64(self.X509Certificate, input_name='X509Certificate'), namespaceprefix_ , eol_)) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + pass + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'X509Certificate': + sval_ = child_.text + if sval_ is not None: + try: + bval_ = base64.b64decode(sval_) + except (TypeError, ValueError) as exp: + raise_parse_error(child_, 'requires base64 encoded string: %s' % exp) + bval_ = self.gds_validate_base64(bval_, node, 'X509Certificate') + else: + bval_ = None + self.X509Certificate = bval_ +# end class X509DataType + + +class CanonicalizationMethodType(GeneratedsSuper): + subclass = None + superclass = None + def __init__(self, Algorithm=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.Algorithm = _cast(None, Algorithm) + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, CanonicalizationMethodType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if CanonicalizationMethodType.subclass: + return CanonicalizationMethodType.subclass(*args_, **kwargs_) + else: + return CanonicalizationMethodType(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): + if ( + + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CanonicalizationMethodType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('CanonicalizationMethodType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='CanonicalizationMethodType') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='CanonicalizationMethodType', pretty_print=pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='CanonicalizationMethodType'): + if 
self.Algorithm is not None and 'Algorithm' not in already_processed: + already_processed.add('Algorithm') + outfile.write(' Algorithm=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.Algorithm), input_name='Algorithm')), )) + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CanonicalizationMethodType', fromsubclass_=False, pretty_print=True): + pass + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('Algorithm', node) + if value is not None and 'Algorithm' not in already_processed: + already_processed.add('Algorithm') + self.Algorithm = value + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + pass +# end class CanonicalizationMethodType + + +class SignatureMethodType(GeneratedsSuper): + subclass = None + superclass = None + def __init__(self, Algorithm=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.Algorithm = _cast(None, Algorithm) + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, SignatureMethodType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if SignatureMethodType.subclass: + return SignatureMethodType.subclass(*args_, **kwargs_) + else: + return SignatureMethodType(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): + if ( + + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SignatureMethodType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('SignatureMethodType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SignatureMethodType') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='SignatureMethodType', pretty_print=pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SignatureMethodType'): + if self.Algorithm is not None and 'Algorithm' not in already_processed: + already_processed.add('Algorithm') + outfile.write(' Algorithm=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.Algorithm), input_name='Algorithm')), )) + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='SignatureMethodType', fromsubclass_=False, pretty_print=True): + pass + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + value 
= find_attr_value_('Algorithm', node) + if value is not None and 'Algorithm' not in already_processed: + already_processed.add('Algorithm') + self.Algorithm = value + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + pass +# end class SignatureMethodType + + +class DigestMethodType(GeneratedsSuper): + subclass = None + superclass = None + def __init__(self, Algorithm=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.Algorithm = _cast(None, Algorithm) + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + CurrentSubclassModule_, DigestMethodType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if DigestMethodType.subclass: + return DigestMethodType.subclass(*args_, **kwargs_) + else: + return DigestMethodType(*args_, **kwargs_) + factory = staticmethod(factory) + def hasContent_(self): + if ( + + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='DigestMethodType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('DigestMethodType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='DigestMethodType') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='DigestMethodType', pretty_print=pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='DigestMethodType'): + if self.Algorithm is not None and 'Algorithm' not in already_processed: + already_processed.add('Algorithm') + outfile.write(' Algorithm=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.Algorithm), input_name='Algorithm')), )) + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='DigestMethodType', fromsubclass_=False, pretty_print=True): + pass + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + value = find_attr_value_('Algorithm', node) + if value is not None and 'Algorithm' not in already_processed: + already_processed.add('Algorithm') + self.Algorithm = value + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + pass +# end class DigestMethodType + + +class RSAKeyValueType(GeneratedsSuper): + subclass = None + superclass = None + def __init__(self, Modulus=None, Exponent=None, **kwargs_): + self.original_tagname_ = None + self.parent_object_ = kwargs_.get('parent_object_') + self.Modulus = Modulus + self.validate_CryptoBinary(self.Modulus) + self.Exponent = Exponent + self.validate_CryptoBinary(self.Exponent) + def factory(*args_, **kwargs_): + if CurrentSubclassModule_ is not None: + subclass = getSubclassFromModule_( + 
CurrentSubclassModule_, RSAKeyValueType) + if subclass is not None: + return subclass(*args_, **kwargs_) + if RSAKeyValueType.subclass: + return RSAKeyValueType.subclass(*args_, **kwargs_) + else: + return RSAKeyValueType(*args_, **kwargs_) + factory = staticmethod(factory) + def validate_CryptoBinary(self, value): + # Validate type CryptoBinary, a restriction on xs:base64Binary. + if value is not None and Validate_simpletypes_: + pass + def hasContent_(self): + if ( + self.Modulus is not None or + self.Exponent is not None + ): + return True + else: + return False + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='RSAKeyValueType', pretty_print=True): + imported_ns_def_ = GenerateDSNamespaceDefs_.get('RSAKeyValueType') + if imported_ns_def_ is not None: + namespacedef_ = imported_ns_def_ + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.original_tagname_ is not None: + name_ = self.original_tagname_ + showIndent(outfile, level, pretty_print) + outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', )) + already_processed = set() + self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='RSAKeyValueType') + if self.hasContent_(): + outfile.write('>%s' % (eol_, )) + self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='RSAKeyValueType', pretty_print=pretty_print) + showIndent(outfile, level, pretty_print) + outfile.write('%s' % (namespaceprefix_, name_, eol_)) + else: + outfile.write('/>%s' % (eol_, )) + def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='RSAKeyValueType'): + pass + def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='RSAKeyValueType', fromsubclass_=False, pretty_print=True): + if pretty_print: + eol_ = '\n' + else: + eol_ = '' + if self.Modulus is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sModulus>%s%s' % (namespaceprefix_ , self.gds_format_base64(self.Modulus, input_name='Modulus'), namespaceprefix_ , eol_)) + if self.Exponent is not None: + showIndent(outfile, level, pretty_print) + outfile.write('<%sExponent>%s%s' % (namespaceprefix_ , self.gds_format_base64(self.Exponent, input_name='Exponent'), namespaceprefix_ , eol_)) + def build(self, node): + already_processed = set() + self.buildAttributes(node, node.attrib, already_processed) + for child in node: + nodeName_ = Tag_pattern_.match(child.tag).groups()[-1] + self.buildChildren(child, node, nodeName_) + return self + def buildAttributes(self, node, attrs, already_processed): + pass + def buildChildren(self, child_, node, nodeName_, fromsubclass_=False): + if nodeName_ == 'Modulus': + sval_ = child_.text + if sval_ is not None: + try: + bval_ = base64.b64decode(sval_) + except (TypeError, ValueError) as exp: + raise_parse_error(child_, 'requires base64 encoded string: %s' % exp) + bval_ = self.gds_validate_base64(bval_, node, 'Modulus') + else: + bval_ = None + self.Modulus = bval_ + # validate type CryptoBinary + self.validate_CryptoBinary(self.Modulus) + elif nodeName_ == 'Exponent': + sval_ = child_.text + if sval_ is not None: + try: + bval_ = base64.b64decode(sval_) + except (TypeError, ValueError) as exp: + raise_parse_error(child_, 'requires base64 encoded string: %s' % exp) + bval_ = self.gds_validate_base64(bval_, node, 'Exponent') + else: + bval_ = None + self.Exponent = bval_ + # validate type CryptoBinary + self.validate_CryptoBinary(self.Exponent) +# end class 
RSAKeyValueType + + +GDSClassesMapping = { + 'Signature': SignatureType, +} + + +USAGE_TEXT = """ +Usage: python .py [ -s ] +""" + + +def usage(): + print(USAGE_TEXT) + sys.exit(1) + + +def get_root_tag(node): + tag = Tag_pattern_.match(node.tag).groups()[-1] + rootClass = GDSClassesMapping.get(tag) + if rootClass is None: + rootClass = globals().get(tag) + return tag, rootClass + + +def parse(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'SignatureType' + rootClass = SignatureType + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + doc = None + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='xmlns:ds="http://www.w3.org/2000/09/xmldsig#"', + pretty_print=True) + return rootObj + + +def parseEtree(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'SignatureType' + rootClass = SignatureType + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + doc = None + mapping = {} + rootElement = rootObj.to_etree(None, name_=rootTag, mapping_=mapping) + reverse_mapping = rootObj.gds_reverse_node_mapping(mapping) + if not silence: + content = etree_.tostring( + rootElement, pretty_print=True, + xml_declaration=True, encoding="utf-8") + sys.stdout.write(content) + sys.stdout.write('\n') + return rootObj, rootElement, mapping, reverse_mapping + + +def parseString(inString, silence=False): + '''Parse a string, create the object tree, and export it. + + Arguments: + - inString -- A string. This XML fragment should not start + with an XML declaration containing an encoding. + - silence -- A boolean. If False, export the object. + Returns -- The root object in the tree. + ''' + parser = None + rootNode= parsexmlstring_(inString, parser) + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'SignatureType' + rootClass = SignatureType + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. + if not silence: + sys.stdout.write('\n') + rootObj.export( + sys.stdout, 0, name_=rootTag, + namespacedef_='xmlns:ds="http://www.w3.org/2000/09/xmldsig#"') + return rootObj + + +def parseLiteral(inFileName, silence=False): + parser = None + doc = parsexml_(inFileName, parser) + rootNode = doc.getroot() + rootTag, rootClass = get_root_tag(rootNode) + if rootClass is None: + rootTag = 'SignatureType' + rootClass = SignatureType + rootObj = rootClass.factory() + rootObj.build(rootNode) + # Enable Python to collect the space used by the DOM. 
+ doc = None + if not silence: + sys.stdout.write('#from xmldsig-core-schema_v01 import *\n\n') + sys.stdout.write('import xmldsig-core-schema_v01 as model_\n\n') + sys.stdout.write('rootObj = model_.rootClass(\n') + rootObj.exportLiteral(sys.stdout, 0, name_=rootTag) + sys.stdout.write(')\n') + return rootObj + + +def main(): + args = sys.argv[1:] + if len(args) == 1: + parse(args[0]) + else: + usage() + + +if __name__ == '__main__': + #import pdb; pdb.set_trace() + main() + + +__all__ = [ + "CanonicalizationMethodType", + "DigestMethodType", + "KeyInfoType", + "KeyValueType", + "RSAKeyValueType", + "ReferenceType", + "SignatureMethodType", + "SignatureType", + "SignatureValueType", + "SignedInfoType", + "TransformType", + "TransformsType", + "X509DataType" +] diff --git a/schemas/paulistana/v02/PedidoCancelamentoLote_v01.xsd b/schemas/paulistana/v02/PedidoCancelamentoLote_v01.xsd new file mode 100644 index 0000000..8fb0ea1 --- /dev/null +++ b/schemas/paulistana/v02/PedidoCancelamentoLote_v01.xsd @@ -0,0 +1,47 @@ + + + + + + + Schema utilizado para PEDIDO de cancelamento de lote. + Este Schema XML é utilizado pelos prestadores de serviços cancelarem as NFS-e geradas a partir de um lote de RPS. + + + + + + Cabeçalho do pedido de cancelamento de lote. + + + + + + Informe o CPF/CNPJ do Remetente autorizado a transmitir a mensagem XML. + + + + + Informe o número do Lote a ser cancelado. + + + + + + Informe a Versão do Schema XML utilizado. + + + + + + + Assinatura digital do CNPJ emissor dos RPS. + + + + + + \ No newline at end of file diff --git a/schemas/paulistana/v02/PedidoCancelamentoNFTS_v01.xsd b/schemas/paulistana/v02/PedidoCancelamentoNFTS_v01.xsd new file mode 100644 index 0000000..421b30d --- /dev/null +++ b/schemas/paulistana/v02/PedidoCancelamentoNFTS_v01.xsd @@ -0,0 +1,71 @@ + + + + + + + + Schema utilizado para PEDIDO de cancelamento de uma NFTS. + Este Schema XML é utilizado para os tomadores/intermediários de serviços cancelarem uma NFTS. + + + + + + Cabeçalho do pedido de cancelamento de uma NFTS. + + + + + + Informe o CPF/CNPJ do Remetente autorizado a transmitir a mensagem XML. + + + + + Informe se as NFTS a serem canceladas farão parte de uma mesma transação. + True - As NFTS só serão canceladas se não ocorrer nenhum evento de erro durante o processamento de todo o lote de cancelamento; + False - As NFTS válidos serão canceladas, mesmo que ocorram eventos de erro durante processamento de outras NFTS deste lote de cancelamento. + + + + + + Informe a Versão do Schema XML utilizado. + + + + + + + + Detalhe do pedido de cancelamento de NFTS. + + + + + + Informe a chave da NFTS a ser cancelada. + + + + + Assinatura da NFTS a ser cancelada. + + + + + + + + Assinatura digital do contribuinte que gerou as informações de cancelamento da NFTS. + + + + + + \ No newline at end of file diff --git a/schemas/paulistana/v02/PedidoCancelamentoNFe_v01.xsd b/schemas/paulistana/v02/PedidoCancelamentoNFe_v01.xsd new file mode 100644 index 0000000..9068db1 --- /dev/null +++ b/schemas/paulistana/v02/PedidoCancelamentoNFe_v01.xsd @@ -0,0 +1,66 @@ + + + + + + + Schema utilizado para PEDIDO de Cancelamento de NFS-e. + Este Schema XML é utilizado pelos Prestadores de serviços cancelarem NFS-e emitidas por eles. + + + + + + Cabeçalho do pedido. + + + + + + Informe o CPF/CNPJ do Remetente autorizado a transmitir a mensagem XML. + + + + + Informe se as NFS-e a serem canceladas farão parte de uma mesma transação. 
True - As NFS-e só serão canceladas se não ocorrer nenhum evento de erro durante o processamento de todo o lote; False - As NFS-e aptas a serem canceladas serão canceladas, mesmo que ocorram eventos de erro durante processamento do cancelamento de outras NFS-e deste lote. + + + + + + Informe a Versão do Schema XML utilizado. + + + + + + + Detalhe do pedido de cancelamento de NFS-e. Cada detalhe deverá conter a Chave de uma NFS-e e sua respectiva assinatura de cancelamento. + + + + + + Chave da NFS-e a ser cancelada. + + + + + Assinatura da NFS-e a ser cancelada. + + + + + + + + Assinatura digital do CNPJ emissor das NFS-e + + + + + + \ No newline at end of file diff --git a/schemas/paulistana/v02/PedidoConsultaCNPJ_v01.xsd b/schemas/paulistana/v02/PedidoConsultaCNPJ_v01.xsd new file mode 100644 index 0000000..84803f3 --- /dev/null +++ b/schemas/paulistana/v02/PedidoConsultaCNPJ_v01.xsd @@ -0,0 +1,47 @@ + + + + + + + Schema utilizado para PEDIDO de consultas de CNPJ. + Este Schema XML é utilizado pelos tomadores e/ou prestadores de serviços consultarem quais Inscrições Municipais (CCM) estão vinculadas a um determinado CNPJ e se estes CCM emitem NFS-e ou não. + + + + + + Cabeçalho do pedido. + + + + + + Informe o CPF/CNPJ do Remetente autorizado a transmitir a mensagem XML. + + + + + + Informe a Versão do Schema XML utilizado. + + + + + + + Informe o CNPJ do Contribuinte que se deseja consultar. + + + + + Assinatura digital do CNPJ tomador/prestador que gerou a mensagem XML. + + + + + + \ No newline at end of file diff --git a/schemas/paulistana/v02/PedidoConsultaCPOM_v01.xsd b/schemas/paulistana/v02/PedidoConsultaCPOM_v01.xsd new file mode 100644 index 0000000..3a88e40 --- /dev/null +++ b/schemas/paulistana/v02/PedidoConsultaCPOM_v01.xsd @@ -0,0 +1,57 @@ + + + + + + + + Schema utilizado para PEDIDO de consultas ao CPOM. + Este Schema XML é utilizado para os tomadores/intermediários consultarem + se um CNPJ de um prestador de serviços tem inscrição no CPOM. + + + + + + Cabeçalho do pedido de consulta ao CPOM. + + + + + + Informe o CPF/CNPJ do Remetente autorizado a transmitir a mensagem XML. + + + + + + Informe a Versão do Schema XML utilizado. + + + + + + + + + + + Informe o CPF/CNPJ do prestador de serviço. + + + + + + + + Assinatura digital do contribuinte que gerou a consulta ao CPOM. + + + + + + diff --git a/schemas/paulistana/v02/PedidoConsultaEmissaoNFSE_v01.xsd b/schemas/paulistana/v02/PedidoConsultaEmissaoNFSE_v01.xsd new file mode 100644 index 0000000..e4d1ebe --- /dev/null +++ b/schemas/paulistana/v02/PedidoConsultaEmissaoNFSE_v01.xsd @@ -0,0 +1,57 @@ + + + + + + + + Schema utilizado para PEDIDO de consultas a autorização de emissão da NFSe. + Este Schema XML é utilizado para os tomadores/intermediários consultarem + se um CNPJ de um prestador de serviços possui autorização para emissão de NFSe. + + + + + + Cabeçalho do pedido de consulta a autorização a emissão da NFSe. + + + + + + Informe o CPF/CNPJ do Remetente autorizado a transmitir a mensagem XML. + + + + + + Informe a Versão do Schema XML utilizado. + + + + + + + + + + + Informe o CPF/CNPJ do prestador de serviço. + + + + + + + + Assinatura digital do contribuinte que gerou a consulta. 
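As an aside (not part of the patch): the generated modules above follow the usual generateDS conventions, factory()/build()/export() on each class plus module-level parse()/parseString() helpers, which can be seen verbatim in xmldsig-core-schema_v01.py. A minimal round trip with those bindings might look like the sketch below; the import path, the use of importlib (the generated file name contains a hyphen), and the availability of lxml are assumptions to verify.

# Illustrative sketch only, not part of the patch. Assumes the nfselib package
# is on sys.path and that lxml is installed. importlib is used because the
# generated file name contains a hyphen and cannot appear in a plain "import".
import importlib
import io

from lxml import etree

xmldsig = importlib.import_module(
    "nfselib.paulistana.v02.xmldsig-core-schema_v01")

FRAGMENT = b"""
<Reference URI="">
  <DigestMethod Algorithm="http://www.w3.org/2000/09/xmldsig#sha1"/>
  <DigestValue>aGVsbG8=</DigestValue>
</Reference>
"""

# build() populates the object tree from an element node; DigestValue is
# base64-decoded on the way in (see ReferenceType.buildChildren above).
ref = xmldsig.ReferenceType.factory()
ref.build(etree.fromstring(FRAGMENT))
print(ref.URI)                        # '' (empty URI attribute)
print(ref.DigestMethod.Algorithm)     # http://www.w3.org/2000/09/xmldsig#sha1
print(ref.DigestValue)                # b'hello'

# export() re-serializes the object; name_ overrides the default tag name.
out = io.StringIO()
ref.export(out, 0, name_='Reference')
print(out.getvalue())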
+ + + + + + diff --git a/schemas/paulistana/v02/PedidoConsultaInformacoesLoteNFTS_v01.xsd b/schemas/paulistana/v02/PedidoConsultaInformacoesLoteNFTS_v01.xsd new file mode 100644 index 0000000..0e73081 --- /dev/null +++ b/schemas/paulistana/v02/PedidoConsultaInformacoesLoteNFTS_v01.xsd @@ -0,0 +1,56 @@ + + + + + + + + Schema utilizado para PEDIDO de consultas de informações do Lote de NFTS. + Este Schema XML é utilizado para os tomadores/intermediários de serviços consultarem as informações do lote de NFTS. + + + + + + Cabeçalho do pedido de consulta de informações do lote de NFTS. + + + + + + Informe o CPF/CNPJ do Remetente autorizado a transmitir a mensagem XML. + + + + + + Informe a Versão do Schema XML utilizado. + + + + + + + + + + + Informe o número do Lote que deseja consultar. + + + + + + + + Assinatura digital do contribuinte que gerou a consulta de informações do lote de NFTS. + + + + + + diff --git a/schemas/paulistana/v02/PedidoConsultaLoteNFTS_v01.xsd b/schemas/paulistana/v02/PedidoConsultaLoteNFTS_v01.xsd new file mode 100644 index 0000000..a0f27bc --- /dev/null +++ b/schemas/paulistana/v02/PedidoConsultaLoteNFTS_v01.xsd @@ -0,0 +1,56 @@ + + + + + + + + Schema utilizado para PEDIDO de consultas de Lote de NFTS. + Este Schema XML é utilizado para os tomadores/intermediários de serviços consultarem as NFTS geradas a partir de um lote de NFTS. + + + + + + Cabeçalho do pedido de consulta de lote de NFTS. + + + + + + Informe o CPF/CNPJ do Remetente autorizado a transmitir a mensagem XML. + + + + + + Informe a Versão do Schema XML utilizado. + + + + + + + + + + + Informe o número do Lote que deseja consultar. + + + + + + + + Assinatura digital do contribuinte que gerou a consulta de lote de NFTS. + + + + + + diff --git a/schemas/paulistana/v02/PedidoConsultaLote_v01.xsd b/schemas/paulistana/v02/PedidoConsultaLote_v01.xsd new file mode 100644 index 0000000..de96f71 --- /dev/null +++ b/schemas/paulistana/v02/PedidoConsultaLote_v01.xsd @@ -0,0 +1,47 @@ + + + + + + + Schema utilizado para PEDIDO de consultas de Lote. + Este Schema XML é utilizado pelos prestadores de serviços consultarem as NFS-e geradas a partir de um lote de RPS. + + + + + + Cabeçalho do pedido. + + + + + + Informe o CPF/CNPJ do Remetente autorizado a transmitir a mensagem XML. + + + + + Informe o número do Lote que deseja consultar. + + + + + + Informe a Versão do Schema XML utilizado. + + + + + + + Assinatura digital do contribuinte que gerou o lote de RPS. + + + + + + \ No newline at end of file diff --git a/schemas/paulistana/v02/PedidoConsultaNFTS_v01.xsd b/schemas/paulistana/v02/PedidoConsultaNFTS_v01.xsd new file mode 100644 index 0000000..578297c --- /dev/null +++ b/schemas/paulistana/v02/PedidoConsultaNFTS_v01.xsd @@ -0,0 +1,59 @@ + + + + + + + + Schema utilizado para PEDIDO de consultas de NFTS. + Este Schema XML é utilizado para os tomadores/intermediários de serviços consultar NFTS. + + + + + + Cabeçalho do pedido de consulta de NFTS. + + + + + + Informe o CPF/CNPJ do Remetente autorizado a transmitir a mensagem XML. + + + + + + Informe a Versão do Schema XML utilizado. + + + + + + + + Enviar consulta de uma ou várias NFTS. + + + + + + Informe a chave da NFTS a ser cancelada. + + + + + + + + Assinatura digital do contribuinte que gerou as NFTS. 
+ + + + + + diff --git a/schemas/paulistana/v02/PedidoConsultaNFePeriodo_v01.xsd b/schemas/paulistana/v02/PedidoConsultaNFePeriodo_v01.xsd new file mode 100644 index 0000000..bbe726a --- /dev/null +++ b/schemas/paulistana/v02/PedidoConsultaNFePeriodo_v01.xsd @@ -0,0 +1,69 @@ + + + + + + + Schema utilizado para PEDIDO de consulta de NFS-e Emitidas ou Recebidas por período. + Este Schema XML é utilizado pelos Prestadores/Tomadores de serviços consultarem NFS-e Emitidas ou Recebidas por eles. + + + + + + Cabeçalho do pedido. + + + + + + Informe o CPF/CNPJ do Remetente autorizado a transmitir a mensagem XML. + + + + + Para consulta de NFS-e Recebidas Informe o CNPJ do Tomador. + Para consulta de NFS-e Emitidas Informe o CNPJ do Prestador. + + + + + Para consulta de NFS-e Recebidas Informe a Inscrição Municipal do Tomador. + Para consulta de NFS-e Emitidas Informe a Inscrição Municipal do Prestador. Neste caso o preenchimento deste campo se torna obrigatório. + + + + + Informe a data de início do período a ser consultado (AAAA-MM-DD). + + + + + Informe a data final do período trasmitido (AAAA-MM-DD). + + + + + Informe o número da página que deseja consultar. + + + + + + Informe a Versão do Schema XML utilizado. + + + + + + + Assinatura digital do tomador das NFS-e. + + + + + + \ No newline at end of file diff --git a/schemas/paulistana/v02/PedidoConsultaNFe_v01.xsd b/schemas/paulistana/v02/PedidoConsultaNFe_v01.xsd new file mode 100644 index 0000000..f228196 --- /dev/null +++ b/schemas/paulistana/v02/PedidoConsultaNFe_v01.xsd @@ -0,0 +1,53 @@ + + + + + + + Schema utilizado para PEDIDO de consultas de NFS-e. + Este Schema XML é utilizado pelos prestadores de serviços consultarem NFS-e geradas por eles. + + + + + + Cabeçalho do pedido. + + + + + + Informe o CPF/CNPJ do Remetente autorizado a transmitir a mensagem XML. + + + + + + Informe a Versão do Schema XML utilizado. + + + + + + + Detalhe do pedido. Cada item de detalhe deverá conter a chave de uma NFS-e ou a chave de um RPS. + + + + + + + + + + + Assinatura digital do contribuinte que gerou as NFS-e/RPS. + + + + + + \ No newline at end of file diff --git a/schemas/paulistana/v02/PedidoEnvioLoteNFTS_v01.xsd b/schemas/paulistana/v02/PedidoEnvioLoteNFTS_v01.xsd new file mode 100644 index 0000000..7a32b7a --- /dev/null +++ b/schemas/paulistana/v02/PedidoEnvioLoteNFTS_v01.xsd @@ -0,0 +1,79 @@ + + + + + + + + Schema utilizado para PEDIDO de envio de lote de NFTS. + Este Schema XML é utilizado pelos tomadores/intermediários de serviços para emissão de NFTS. + + + + + + Cabeçalho do pedido NFTS. + + + + + + Informe os dados do Remetente autorizado a transmitir a mensagem XML. + + + + + Informe se as NFTS a serem emitidas farão parte de uma mesma transação. True - As NFTS só serão emitidas se não ocorrer nenhum evento de erro durante o processamento de todo o lote; False - As NFTS válidos serão emitidas, mesmo que ocorram eventos de erro durante processamento de outras NFTS deste lote. + + + + + Informe a data de início do período transmitido (AAAA-MM-DD). + + + + + Informe a data final do período transmitido (AAAA-MM-DD). + + + + + Informe o total de NFTS contidos na mensagem XML. + + + + + Informe o valor total dos serviços das NFTS contidos na mensagem XML. + + + + + Informe o valor total das deduções das NFTS contidos na mensagem XML. + + + + + + Informe a Versão do Schema XML utilizado. + + + + + + + + Informe as NFTS a serem emitidas. + + + + + Assinatura digital do contribuinte que gerou as NFTS contidos na mensagem XML. 
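Both batch request schemas (PedidoEnvioLoteNFTS above and PedidoEnvioLoteRPS just below) require the sender to fill aggregate header fields: the period start and end dates, the document count, and the totals of services and deductions. A small, hypothetical helper for computing those aggregates, using generic names rather than the exact schema element names:

# Hypothetical helper, not part of the patch: compute the aggregate header
# values the batch schemas ask for from the per-document data.
from dataclasses import dataclass
from datetime import date
from decimal import Decimal


@dataclass
class DocumentoLote:
    data: date
    valor_servicos: Decimal
    valor_deducoes: Decimal = Decimal("0.00")


def cabecalho_lote(documentos):
    """Return the batch header aggregates as a dict of plain values."""
    if not documentos:
        raise ValueError("lote vazio")
    return {
        "dtInicio": min(d.data for d in documentos).isoformat(),  # AAAA-MM-DD
        "dtFim": max(d.data for d in documentos).isoformat(),     # AAAA-MM-DD
        "Quantidade": len(documentos),
        "ValorTotalServicos": sum(d.valor_servicos for d in documentos),
        "ValorTotalDeducoes": sum(d.valor_deducoes for d in documentos),
    }


docs = [
    DocumentoLote(date(2020, 7, 1), Decimal("150.00")),
    DocumentoLote(date(2020, 7, 15), Decimal("99.90"), Decimal("10.00")),
]
print(cabecalho_lote(docs))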
+ + + + + + \ No newline at end of file diff --git a/schemas/paulistana/v02/PedidoEnvioLoteRPS_v01.xsd b/schemas/paulistana/v02/PedidoEnvioLoteRPS_v01.xsd new file mode 100644 index 0000000..6badb22 --- /dev/null +++ b/schemas/paulistana/v02/PedidoEnvioLoteRPS_v01.xsd @@ -0,0 +1,77 @@ + + + + + + + Schema utilizado para PEDIDO de envio de lote de RPS. + Este Schema XML é utilizado pelos prestadores de serviços para substituição em lote de RPS por NFS-e. + + + + + + Cabeçalho do pedido. + + + + + + Informe o CPF/CNPJ do Remetente autorizado a transmitir a mensagem XML. + + + + + Informe se os RPS a serem substituídos por NFS-e farão parte de uma mesma transação. True - Os RPS só serão substituídos por NFS-e se não ocorrer nenhum evento de erro durante o processamento de todo o lote; False - Os RPS válidos serão substituídos por NFS-e, mesmo que ocorram eventos de erro durante processamento de outros RPS deste lote. + + + + + Informe a data de início do período transmitido (AAAA-MM-DD). + + + + + Informe a data final do período transmitido (AAAA-MM-DD). + + + + + Informe o total de RPS contidos na mensagem XML. + + + + + Informe o valor total dos serviços prestados dos RPS contidos na mensagem XML. + + + + + Informe o valor total das deduções dos RPS contidos na mensagem XML. + + + + + + Informe a Versão do Schema XML utilizado. + + + + + + + Informe os RPS a serem substituidos por NFS-e. + + + + + Assinatura digital do contribuinte que gerou os RPS contidos na mensagem XML. + + + + + + \ No newline at end of file diff --git a/schemas/paulistana/v02/PedidoEnvioNFTS_v01.xsd b/schemas/paulistana/v02/PedidoEnvioNFTS_v01.xsd new file mode 100644 index 0000000..e352f1f --- /dev/null +++ b/schemas/paulistana/v02/PedidoEnvioNFTS_v01.xsd @@ -0,0 +1,49 @@ + + + + + + + + Schema utilizado para PEDIDO de envio de NFTS. + Este Schema XML é utilizado pelos tomadores/intermediários de serviços para emissão de NFTS. + + + + + + Cabeçalho do pedido NFTS. + + + + + + Informe os dados do Remetente autorizado a transmitir a mensagem XML. + + + + + + Informe a Versão do Schema XML utilizado. + + + + + + + + Informe as NFTS a ser emitida. + + + + + Assinatura digital do contribuinte que gerou as NFTS contidos na mensagem XML. + + + + + + \ No newline at end of file diff --git a/schemas/paulistana/v02/PedidoEnvioRPS_v01.xsd b/schemas/paulistana/v02/PedidoEnvioRPS_v01.xsd new file mode 100644 index 0000000..e880a9c --- /dev/null +++ b/schemas/paulistana/v02/PedidoEnvioRPS_v01.xsd @@ -0,0 +1,47 @@ + + + + + + + Schema utilizado para PEDIDO de envio de RPS. + Este Schema XML é utilizado pelos prestadores de serviços para substituição online e individual de RPS por NFS-e. + + + + + + Cabeçalho do pedido. + + + + + + Informe o CPF/CNPJ do Remetente autorizado a transmitir a mensagem XML. + + + + + + Informe a Versão do Schema XML utilizado. + + + + + + + Informe o RPS a ser substituido por NFS-e. + + + + + Assinatura digital do contribuinte que gerou o RPS contido da mensagem XML. + + + + + + \ No newline at end of file diff --git a/schemas/paulistana/v02/PedidoInformacoesLote_v01.xsd b/schemas/paulistana/v02/PedidoInformacoesLote_v01.xsd new file mode 100644 index 0000000..c5b28cf --- /dev/null +++ b/schemas/paulistana/v02/PedidoInformacoesLote_v01.xsd @@ -0,0 +1,52 @@ + + + + + + + Schema utilizado para PEDIDO de informações de lote. + Este Schema XML é utilizado pelos prestadores de serviços para obterem informações de lotes de RPS que geraram NFS-e. + + + + + + Cabeçalho do pedido. 
+ + + + + + Informe o CPF/CNPJ do Remetente autorizado a transmitir a mensagem XML. + + + + + Informe o número do lote que deseja obter informações. Caso não seja informado o número do lote, serão retornadas informações do último lote gerador de NFS-e. + + + + + Informe a Inscrição municipal do prestador de serviços que gerou o lote. + + + + + + Informe a Versão do Schema XML utilizado. + + + + + + + Assinatura digital do contribuinte que gerou o lote de RPS. + + + + + + \ No newline at end of file diff --git a/schemas/paulistana/v02/RetornoCancelamentoNFTS_v01.xsd b/schemas/paulistana/v02/RetornoCancelamentoNFTS_v01.xsd new file mode 100644 index 0000000..d3e15a1 --- /dev/null +++ b/schemas/paulistana/v02/RetornoCancelamentoNFTS_v01.xsd @@ -0,0 +1,28 @@ + + + + + + + + Schema utilizado para RETORNO de Pedidos de cancelamento da NFTS. + Este Schema XML é utilizado pelo Web Service para informar aos tomadores/intermediários de serviços o resultado do pedido de cancelamento de uma NFTS. + + + + + + + + Elemento que representa a ocorrência de eventos durante o processamento da mensagem XML. + + + + + + + \ No newline at end of file diff --git a/schemas/paulistana/v02/RetornoCancelamentoNFe_v01.xsd b/schemas/paulistana/v02/RetornoCancelamentoNFe_v01.xsd new file mode 100644 index 0000000..2582150 --- /dev/null +++ b/schemas/paulistana/v02/RetornoCancelamentoNFe_v01.xsd @@ -0,0 +1,47 @@ + + + + + + + Schema utilizado para RETORNO de Pedidos de cancelamento de NFS-e. + Este Schema XML é utilizado pelo Web Service para informar aos prestadores de serviços qual o resultado do pedido de cancelamento de NFS-e. + + + + + + Cabeçalho do retorno. + + + + + + Campo indicativo do sucesso do pedido do serviço. + + + + + + Versão do Schema XML utilizado. + + + + + + + Elemento que representa a ocorrência de eventos de alerta durante o processamento da mensagem XML. + + + + + Elemento que representa a ocorrência de eventos de erro durante o processamento da mensagem XML. + + + + + + \ No newline at end of file diff --git a/schemas/paulistana/v02/RetornoConsultaCNPJ_v01.xsd b/schemas/paulistana/v02/RetornoConsultaCNPJ_v01.xsd new file mode 100644 index 0000000..11fb0d2 --- /dev/null +++ b/schemas/paulistana/v02/RetornoConsultaCNPJ_v01.xsd @@ -0,0 +1,63 @@ + + + + + + + Schema utilizado para RETORNO de Pedidos de Consultas de CNPJ. + Este Schema XML é utilizado pelo Web Service para informar aos tomadores e/ou prestadores de serviços quais Inscrições Municipais (CCM) estão vinculadas a um determinado CNPJ e se estes CCM emitem NFS-e ou não. + + + + + + Cabeçalho do retorno. + + + + + + Campo indicativo do sucesso do pedido do serviço. + + + + + + Versão do Schema XML utilizado. + + + + + + + Elemento que representa a ocorrência de eventos de alerta durante o processamento da mensagem XML. + + + + + Elemento que representa a ocorrência de eventos de erro durante o processamento da mensagem XML. + + + + + + + + Inscrição Municipal vinculada ao CNPJ consultado. + + + + + Campo que indica se o CCM vinculado ao CNPJ consultado emite NFS-e ou não. + + + + + + + + + \ No newline at end of file diff --git a/schemas/paulistana/v02/RetornoConsultaCPOM_v01.xsd b/schemas/paulistana/v02/RetornoConsultaCPOM_v01.xsd new file mode 100644 index 0000000..49a9ba5 --- /dev/null +++ b/schemas/paulistana/v02/RetornoConsultaCPOM_v01.xsd @@ -0,0 +1,23 @@ + + + + + + + + Schema utilizado para RETORNO de consultas ao CPOM. 
+ Este Schema XML é utilizado pelo Web Service para informar aos tomadores/intermediários de serviços + o resultado do pedido de consulta de inscrição no CPOM. + + + + + + + + + diff --git a/schemas/paulistana/v02/RetornoConsultaEmissaoNFSE_v01.xsd b/schemas/paulistana/v02/RetornoConsultaEmissaoNFSE_v01.xsd new file mode 100644 index 0000000..4558bf1 --- /dev/null +++ b/schemas/paulistana/v02/RetornoConsultaEmissaoNFSE_v01.xsd @@ -0,0 +1,23 @@ + + + + + + + + Schema utilizado para RETORNO de consultas a autorização de emissão da NFSE. + Este Schema XML é utilizado pelo Web Service para informar aos tomadores/intermediários de serviços + o resultado do pedido de consulta de autorização a emissão da NFSE. + + + + + + + + + diff --git a/schemas/paulistana/v02/RetornoConsultaInformacoesLoteNFTS_v01.xsd b/schemas/paulistana/v02/RetornoConsultaInformacoesLoteNFTS_v01.xsd new file mode 100644 index 0000000..1d53991 --- /dev/null +++ b/schemas/paulistana/v02/RetornoConsultaInformacoesLoteNFTS_v01.xsd @@ -0,0 +1,25 @@ + + + + + + + Schema utilizado para RETORNO de Pedidos de Informações de Lote de NFTS. + Este Schema XML é utilizado pelo Web Service para informar aos tomadores/intermediários de serviços o resultado do pedido de informações de lote de NFTS. + + + + + + + Elemento que representa a ocorrência de eventos ou NFTS durante o processamento da mensagem XML. + + + + + + diff --git a/schemas/paulistana/v02/RetornoConsultaNFTS_v01.xsd b/schemas/paulistana/v02/RetornoConsultaNFTS_v01.xsd new file mode 100644 index 0000000..4869af3 --- /dev/null +++ b/schemas/paulistana/v02/RetornoConsultaNFTS_v01.xsd @@ -0,0 +1,25 @@ + + + + + + + Schema utilizado para RETORNO de pedidos de consulta de NFTS, consulta de NFTS recebidas e consulta de lote de NFTS. + Este Schema XML é utilizado pelo Web Service para informar aos tomadores e/ou intermediários de serviços o resultado de pedidos de consulta de NFTS, consulta de NFTS recebidas e consulta de lote de NFTS. + + + + + + + Elemento que representa a ocorrência de eventos ou NFTS durante o processamento da mensagem XML. + + + + + + diff --git a/schemas/paulistana/v02/RetornoConsulta_v01.xsd b/schemas/paulistana/v02/RetornoConsulta_v01.xsd new file mode 100644 index 0000000..883409d --- /dev/null +++ b/schemas/paulistana/v02/RetornoConsulta_v01.xsd @@ -0,0 +1,52 @@ + + + + + + + Schema utilizado para RETORNO de pedidos de consulta de NFS-e/RPS, consultade NFS-e recebidas e consulta de lote. + Este Schema XML é utilizado pelo Web Service para informar aos tomadores e/ou prestadores de serviços o resultado de pedidos de consulta de NFS-e/RPS, consultade NFS-e recebidas e consulta de lote. + + + + + + Cabeçalho do retorno. + + + + + + Campo indicativo do sucesso do pedido do serviço. + + + + + + Versão do Schema XML utilizado. + + + + + + + Elemento que representa a ocorrência de eventos de alerta durante o processamento da mensagem XML. + + + + + Elemento que representa a ocorrência de eventos de erro durante o processamento da mensagem XML. + + + + + Elemento NFe - Cada item será um NFS-e. + + + + + + \ No newline at end of file diff --git a/schemas/paulistana/v02/RetornoEnvioLoteNFTS_v01.xsd b/schemas/paulistana/v02/RetornoEnvioLoteNFTS_v01.xsd new file mode 100644 index 0000000..170e448 --- /dev/null +++ b/schemas/paulistana/v02/RetornoEnvioLoteNFTS_v01.xsd @@ -0,0 +1,24 @@ + + + + + + + Schema utilizado para Retorno de envio de lote de NFTS. 
+ Este Schema XML é utilizado para informar os tomadores/intermediários de serviços o resultado do pedido do envio do lote da emissão de NFTS. + + + + + + + Elemento que representa a ocorrência de eventos ou NFTS durante o processamento da mensagem XML. + + + + + + \ No newline at end of file diff --git a/schemas/paulistana/v02/RetornoEnvioLoteRPS_v01.xsd b/schemas/paulistana/v02/RetornoEnvioLoteRPS_v01.xsd new file mode 100644 index 0000000..f7f192a --- /dev/null +++ b/schemas/paulistana/v02/RetornoEnvioLoteRPS_v01.xsd @@ -0,0 +1,57 @@ + + + + + + + Schema utilizado para RETORNO de Pedidos de Envio de lote de RPS. + Este Schema XML é utilizado pelo Web Service para informar aos prestadores de serviços o resultado do pedido de envio de lote de RPS. + + + + + + Cabeçalho do retorno. + + + + + + Campo indicativo do sucesso do pedido do serviço. + + + + + Informações sobre o lote processado. + + + + + + Versão do Schema XML utilizado. + + + + + + + Elemento que representa a ocorrência de eventos de alerta durante o processamento da mensagem XML. + + + + + Elemento que representa a ocorrência de eventos de erro durante o processamento da mensagem XML. + + + + + Chave da NFS-e e Chave do RPS que esta substitui. + + + + + + \ No newline at end of file diff --git a/schemas/paulistana/v02/RetornoEnvioNFTS_v01.xsd b/schemas/paulistana/v02/RetornoEnvioNFTS_v01.xsd new file mode 100644 index 0000000..9b18740 --- /dev/null +++ b/schemas/paulistana/v02/RetornoEnvioNFTS_v01.xsd @@ -0,0 +1,29 @@ + + + + + + + Schema utilizado para Retorno de envio de lote de NFTS. + Este Schema XML é utilizado para informar os tomadores/intermediários de serviços o resultado do pedido do envio do lote da emissão de NFTS. + + + + + + + Elemento que representa a ocorrência de eventos ou NFTS durante o processamento da mensagem XML. + + + + + Identificação da NFTS gerada. + + + + + + \ No newline at end of file diff --git a/schemas/paulistana/v02/RetornoEnvioRPS_v01.xsd b/schemas/paulistana/v02/RetornoEnvioRPS_v01.xsd new file mode 100644 index 0000000..5781d12 --- /dev/null +++ b/schemas/paulistana/v02/RetornoEnvioRPS_v01.xsd @@ -0,0 +1,52 @@ + + + + + + + Schema utilizado para RETORNO de Pedidos de Envio de RPS. + Este Schema XML é utilizado pelo Web Service para informar aos prestadores de serviços o resultado do pedido de envio de RPS. + + + + + + Cabeçalho do retorno. + + + + + + Campo indicativo do sucesso do pedido do serviço. + + + + + + Versão do Schema XML utilizado. + + + + + + + Elemento que representa a ocorrência de eventos de alerta durante o processamento da mensagem XML. + + + + + Elemento que representa a ocorrência de eventos de erro durante o processamento da mensagem XML. + + + + + Chave da NFS-e e Chave do RPS que esta substitui. + + + + + + \ No newline at end of file diff --git a/schemas/paulistana/v02/RetornoInformacoesLote_v01.xsd b/schemas/paulistana/v02/RetornoInformacoesLote_v01.xsd new file mode 100644 index 0000000..770c06e --- /dev/null +++ b/schemas/paulistana/v02/RetornoInformacoesLote_v01.xsd @@ -0,0 +1,52 @@ + + + + + + + Schema utilizado para RETORNO de Pedidos de Informações de Lote. + Este Schema XML é utilizado pelo Web Service para informar aos prestadores de serviços o resultado do pedido de informações de lote. + + + + + + Cabeçalho do retorno. + + + + + + Campo indicativo do sucesso do pedido do serviço. + + + + + Informações do lote consultado. + + + + + + Versão do Schema XML utilizado. 
+ + + + + + + Elemento que representa a ocorrência de eventos de alerta durante o processamento da mensagem XML. + + + + + Elemento que representa a ocorrência de eventos de erro durante o processamento da mensagem XML. + + + + + + \ No newline at end of file diff --git a/schemas/paulistana/v02/TiposNFTS_v01.xsd b/schemas/paulistana/v02/TiposNFTS_v01.xsd new file mode 100644 index 0000000..299238a --- /dev/null +++ b/schemas/paulistana/v02/TiposNFTS_v01.xsd @@ -0,0 +1,1113 @@ + + + + + + + Tipo utilizado para valor de alíquota + + + + + + + + + + Assinatura digital do RPS emitido. + + + + + + + Assinatura digital de cancelamento da NFTS. + + + + + + + Tipo bairro. + + + + + + + + + + Tipo CEP. + + + + + + + + Tipo cidade. + + + + + + + + Tipo cidade em formato texto. + + + + + + + + + Tipo CNPJ. + + + + + + + + Tipo código de serviço. + + + + + + + + Tipo código de serviço. + + + + + + + + Tipo complemento do endereço. + + + + + + + + + + Tipo CPF. + + + + + + + + Tipo Discriminação Serviços. + + + + + + + + + + Tipo E-mail. + + + + + + + + + + + Tipo padrão referente a inscrição municipal. + + + + + + + + Endereço. + + + + + + + + + + Tipo número. + + + + + + + + Tipo número do endereço. + + + + + + + + + + Tipo padrão para quantidades. + + + + + + + + Tipo Razão Social. + + + + + + + + + + Tipo série de documento. + + + + + + + + + + Tipo que indica se o pedido do serviço obteve sucesso. + + + + + + + Tipo referente ao tempo de processamento do lote. + + + + + + + + Tipo do endereço (Rua, Av, ...). + + + + + + + + + + Tipo UF. + + + + + + + + + + Tipo utilizado para valores com 15 dígitos, sendo 13 de corpo e 2 decimais. + + + + + + + + + + + Tipo Versão do Schema. + + + + + + + + Tipo referente aos possíveis tipos da NFTS. + + + + + + + + Dispensado de emissão de documento fiscal. + + + + + Com emissão de documento fiscal autorizado pelo município. + + + + + Sem emissão de documento fiscal embora obrigado. + + + + + Documento fiscal emitido por prestador de outro munícipio-recebido por consórcio de construção civil. + + + + + + + Tipo série de documento NFTS. + + + + + + + + + + Informe o número do documento da NTFS. + + + + + + + + Informe o número da NTFS. + + + + + + + + Tipo referente aos possíveis status de NFTS. + + + + + Normal. + + + + + Cancelada. + + + + + + + Tipo referente aos modos de tributação da NFTS. + + + + + Operação Normal. + + + + + Imune. + + + + + ISS Suspenso por Decisão Judicial. + + + + + + + Código do Subitem da lista de serviços. + + + + + + + + Informe a retenção. + true - ISS Retido pelo tomador. + false - NFTS sem ISS Retido. + + + + + + + Informe a retenção. + true - ISS Retido pelo intermediário. + false - NFTS sem ISS Retido. + + + + + + + Informe o município na qual o prestador está situado descumpre a Lei Complementar 157/2016 que determina que o valor mínimo do ISS deve ser de 2%. + true - Município descumpre lei. + false - Município não descumpre lei. + + + + + + + Tipo da NFTS. + 1 - Nota Fiscal do Tomador. + 2 - Nota Fiscal do Intermediário. + + + + + + + + Regime de Tributação. + 0 - Normal. + 4 - Simples Nacional. + 5 - Microempreendedor Individual MEI. + + + + + + + + Tipo código de evento. + + + + + + + + Tipo descrição do evento. + + + + + + + + + + Tipo Código de verificação da NFTS. + + + + + + + + + + Indica o número do lote gerado pelo processamento. + + + + + + + + + + + + + + Situação do cadastro no CPOM em formato texto. + + + + + + + + Situação da autorização de emissão da NFSE em formato texto. 
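The Retorno* schemas above, and the NFTS return types that follow, share one shape: a header carrying a success flag plus repeated Alerta/Erro events, each with a code and a description. A hypothetical consumer is sketched below; the attribute names mirror the annotations and generateDS naming habits, but they are assumptions to check against the generated classes.

def resumo_retorno(retorno):
    """Summarize a parsed Retorno* object: success flag plus event messages."""
    # Cabecalho.Sucesso, Alerta, Erro, Codigo and Descricao are assumed names
    # taken from the schema annotations; verify them in the generated classes.
    sucesso = bool(retorno.Cabecalho.Sucesso)
    mensagens = []
    for nivel, eventos in (("ALERTA", retorno.Alerta), ("ERRO", retorno.Erro)):
        for evento in eventos or []:
            mensagens.append("%s %s: %s" % (nivel, evento.Codigo, evento.Descricao))
    return sucesso, mensagens


# Typical use (module and helper names assumed, following the generateDS
# parseString() helper shown earlier in this patch):
# from nfselib.paulistana.v02 import RetornoEnvioLoteRPS_v01
# retorno = RetornoEnvioLoteRPS_v01.parseString(xml_string, silence=True)
# ok, mensagens = resumo_retorno(retorno)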
+ + + + + + + + Situação da Inscrição Municipal em formato texto. + + + + + + + + + + + + Código do evento. + + + + + Descrição do enveto. + + + + + Informações que irão identificar o documento onde ocorreu o problema. + + + + + Informações que irão identificar a NFTS onde ocorreu o problema. + + + + + + + + + Número do lote gerado pelo processamento. + + + + + Dados do Remetente do Lote da mensagem XML transmitida. + + + + + Retorna a data de envio do lote (AAAA-MM-DDTHH:mm:ss). + + + + + Retorna a quantidade de NFTS processadas. + + + + + Retorna o tempo de processamento do lote. + + + + + Retorna o valor total dos serviços das NFTS contidos na mensagem XML. + + + + + + + Cabeçalho do retorno. + + + + + Campo indicativo do sucesso do pedido do serviço. + + + + + Informações sobre o lote processado. + + + + + + Informe a Versão do Schema XML utilizado. + + + + + + Cabeçalho do retorno para consultas. + + + + + Campo indicativo do sucesso do pedido do serviço. + + + + + + Informe a Versão do Schema XML utilizado. + + + + + + Lista de mensagens de retorno do lote + + + + + Elemento que representa a ocorrência de eventos de alerta durante o processamento da mensagem XML. + + + + + Elemento que representa a ocorrência de eventos de erro durante o processamento da mensagem XML. + + + + + + + Lista de mensagens de retorno da NFTS + + + + + Elemento que representa a ocorrência de eventos de alerta durante o processamento da mensagem XML. + + + + + Elemento que representa a ocorrência de eventos de erro durante o processamento da mensagem XML. + + + + + + + Lista de mensagens de retorno do lote + + + + + Elemento que representa a ocorrência de eventos de erro/alerta durante o processamento da mensagem XML. + + + + + Elemento que representa a lista de NFTS. + + + + + + + Identificação da NFTS. + + + + + Indica a posição da NFTS no arquivo XML. + + + + + + + + + + Informe a chave do documento Inscrição Municipal/Sério/Número documento da NFTS. + + + + + + + Identificação da NFTS. + + + + + Indica a posição da NFTS no arquivo XML. + + + + + + + + + + Informe a chave do documento Inscrição Municipal/Número NFTS/Código de Verificação da NFTS. + + + + + + + Tipo que representa um CPF/CNPJ. + + + + + + + + + Dados do Remetende CPF/CNPJ + + + + + Informe o CPF/CNPJ do Remetente autorizado a transmitir a mensagem XML. + + + + + + + Dados do Remetende CPF/CNPJ e Inscricao Municipal + + + + + Informe o CPF/CNPJ do Remetente autorizado a transmitir a mensagem XML de cancelamento de NFTS. + + + + + Informe a Inscrição Municipal do Remetente autorizado a transmitir a mensagem XML de cancelamento de NFTS. + + + + + + + Chave de identificação da NFTS. + + + + + Informe a Inscrição Municipal da NFTS. + + + + + Informe a série da NFTS. + + + + + Informe o número do documento. + + + + + + + Chave de identificação da NFTS. + + + + + Informe a Inscrição Municipal da NFTS. + + + + + Número da NFS-e. + + + + + Código de verificação da NFTS. + + + + + + + Tipo Endereço. + + + + + + + + + + + + + + + Dados do Prestador de serviço + + + + + Informe o CPF/CNPJ do prestador do serviço. + Para prestador de serviço estrangeiro, não enviara esta TAG + Caso o campo ISS Retido esteja preenchido com 3, o preenchimento deste campo é com zeros + + + + + Informe a Inscrição Municipal do Prestador. + ATENÇÃO: Este campo só deverá ser preenchido para prestadores estabelecidos no município de São Paulo (CCM). 
+ Quando este campo for preenchido, seu conteúdo será considerado como prioritário com relação ao campo de CPF/CNPJ do Prestador, sendo utilizado para identificar o Prestador e recuperar seus dados da base de dados da Prefeitura. + + + + + Informe o Nome/Razão Social do Prestador. + Este campo será ignorado caso seja fornecido um CPF/CNPJ ou a Inscrição Municipal do prestador pertença a São Paulo. + + + + + Informe o endereço do prestador. + O conteúdo destes campos será ignorado caso seja fornecido um CNPJ/CPF ou a Inscrição Municipal do prestador pertença ao município de São Paulo. + Se estes campos estiverem informados, serão considerados no caso de prestador sem Inscrição Municipal. Nesta situação os dados da Receita Federal não serão considerados. + Os dados da Receita Federal serão utilizados apenas se estes dados não estiverem informados. + + + + + Informe o e-mail do prestador. + + + + + + + Dados do tomador de serviço + + + + + CPF/CNPJ do tomador de serviço. + Para os casos em que a NFTS se tratar de nota fiscal do intermediário, obrigatório informar o CPF/CNPJ do tomador. + + + + + Informe o Nome/Razão Social do Tomador. + Para os casos em que a NFTS se tratar de nota fiscal do intermediário, obrigatório informar a Razão Social do tomador. + + + + + + + Tipo que representa uma NFTS. + + + + + Informe o tipo do documento NFTS + 01 - Dispensado de emissão de documento fiscal. + 02 - Com emissão de documento fiscal autorizado pelo município. + 03 - Sem emissão de documento fiscal embora obrigado + 05 - Documento fiscal emitido por prestador de outro munícipio-recebido por consórcio de construção civil. + + + + + Informe a série da NFTS. + + + + + Informe a data da prestação de serviços (Formato: AAAAMMDD). + + + + + Informe o Status da NFTS. + + + + + Informe o tipo de tributação da NFTS. + + + + + Informe o valor dos serviços. + + + + + Informe o valor das deduções. + + + + + Informe o código do serviço da NFTS. Este código deve pertencer à lista de serviços. + + + + + Informe o código do Subitem da lista de serviços. + + + + + Informe o valor da alíquota. + + + + + Informe true para retenção do tomador ou false para sem retenção. + + + + + Informe true para retenção de intermediário ou false para sem retenção. + + + + + Informe true se o município do prestador descumprir a Lei Complementar 137/2016 que determina que o valor mínimo do ISS é de 2%. + + + + + Informe o e-mail do prestador. + + + + + Informe o Regime de Tributação. + + + + + Informe a data em que o serviço foi pago ao prestador. + Esta informação somente será considerada para tomadores de serviço de Orgãos Públicos. + + + + + Informe a discriminação dos serviços. + + + + + Define se a NFTS é do tomador ou intermediário. + + + + + Dados do tomador de serviço. + + + + + Assinatura digital da NFTS. + + + + + Código do CEI – Cadastro específico do INSS. + + + + + Código que representa a matrícula da obra no sistema de cadastro de obras. + + + + + + + Tipo que representa uma NFTS de retorno. + + + + + Informe o tipo do documento NFTS + 01 - Dispensado de emissão de documento fiscal. + 02 - Com emissão de documento fiscal autorizado pelo município. + + + + + Dados do documento. Inscrição Municipal/Série/NúmeroDocumento. + + + + + Chave da NFTS. + + + + + Informe a data da prestação de serviços (Formato: AAAAMMDD). + + + + + Informe o Status da NFTS. + + + + + Informe o tipo de tributação da NFTS. + + + + + Informe o valor dos serviços. + + + + + Informe o valor das deduções. + + + + + Informe o código do serviço da NFTS. 
Este código deve pertencer à lista de serviços. + + + + + Informe o código do Subitem da lista de serviços. + + + + + Informe o valor da alíquota. + + + + + Informe true para retenção do tomador ou false para sem retenção. + + + + + Informe true para retenção de intermediário ou false para sem retenção. + + + + + Informe o e-mail do prestador. + + + + + Informe o Regime de Tributação. + + + + + Informe a data em que o serviço foi pago ao prestador. + Esta informação somente será considerada para tomadores de serviço de Orgãos Públicos. + + + + + Informe a discriminação dos serviços. + + + + + Dados do tomador de serviço. + + + + + + + Lista de mensagens de retorno + + + + + Elemento que representa a ocorrência de eventos de alerta durante o processamento da mensagem XML. + + + + + Elemento que representa a ocorrência de eventos de erro durante o processamento da mensagem XML. + + + + + Elemento que representa os detalhes de retorno da consulta ao CPOM no processamento da mensagem XML. + + + + + + + + + + + + + + + + Código e descriçao da atividade + + + + + + + + + + + + + + + + + + + + + + + Lista de mensagens de retorno + + + + + Elemento que representa a ocorrência de eventos de alerta durante o processamento da mensagem XML. + + + + + Elemento que representa a ocorrência de eventos de erro durante o processamento da mensagem XML. + + + + + Elemento que representa os detalhes de retorno da consulta ao CPOM no processamento da mensagem XML. + + + + + + \ No newline at end of file diff --git a/schemas/paulistana/v02/TiposNFe_v01.xsd b/schemas/paulistana/v02/TiposNFe_v01.xsd new file mode 100644 index 0000000..cb15a38 --- /dev/null +++ b/schemas/paulistana/v02/TiposNFe_v01.xsd @@ -0,0 +1,975 @@ + + + + + + + Tipo utilizado para valor de alíquota + + + + + + + + + + Assinatura digital do RPS emitido. + O RPS deverá ser assinado digitalmente. O contribuinte deverá assinar uma cadeia de caracteres (ASCII) com informações do RPS emitido. + O certificado digital utilizado na assinatura de cancelamento deverá ser o mesmo utilizado na assinatura da mensagem XML. A cadeia de caracteres a ser assinada deverá conter 86 posições com as informações apresentadas a seguir: + Inscrição Municipal (CCM) do Prestador com 8 caracteres. Caso o CCM do Prestador tenha menos de 8 caracteres, o mesmo deverá ser completado com zeros à esquerda. + Série do RPS com 5 posições. Caso a Série do RPS tenha menos de 5 caracteres, o mesmo deverá ser completado com espaços em branco à direita. + Número do RPS com 12 posições. Caso o Número do RPS tenha menos de 12 caracteres, o mesmo deverá ser completado com zeros à esquerda. + Data da emissão do RPS no formato AAAAMMDD. + Tipo de Tributação do RPS com uma posição (sendo T: para Tributação no municipio de São Paulo; F: para Tributação fora do municipio de São Paulo; I: para Isento; J: para ISS Suspenso por Decisão Judicial). + Status do RPS com uma posição (sendo N: Normal, C: Cancelado; E: Extraviado). + ISS Retido com uma posição (sendo S: ISS Retido; N: Nota Fiscal sem ISS Retido). + Valor dos Serviços com 15 posições e sem separador de milhar e decimal. + Valor das Deduções com 15 posições e sem separador de milhar e decimal. + Código do Serviço com 5 posições. + CPF/CNPJ do tomador com 14 posições. Sem formatação (ponto, traço, barra, ....). Completar com zeros à esquerda caso seja necessário. Se o Indicador do CPF/CNPJ for 3 (não-informado), preencher com 14 zeros. + + + + + + + Assinatura digital de cancelamento da NFS-e. 
+ Cada NFS-e a ser cancelada deverá ter sua respectiva assinatura de cancelamento. O contribuinte deverá assinar uma cadeia de caracteres (ASCII) com informações da NFS-e a ser cancelada. + O certificado digital utilizado na assinatura de cancelamento deverá ser o mesmo utilizado na assinatura da mensagem XML. A cadeia de caracteres a ser assinada deverá conter 20 posições com as informações apresentadas a seguir: + Inscrição Municipal (CCM) do Prestador com 8 caracteres. Caso o CCM do Prestador tenha menos de 8 caracteres, o mesmo deverá ser completado com zeros à esquerda. + Número da NFS-e RPS com 12 posições. Caso o Número da NFS-e tenha menos de 12 caracteres, o mesmo deverá ser completado com zeros à esquerda. + + + + + + + Tipo bairro. + + + + + + + + + + Tipo CEP. + + + + + + + + Tipo cidade. + + + + + + + + Tipo CNPJ. + + + + + + + + Tipo código de serviço. + + + + + + + + Tipo código de evento. + + + + + + + + Tipo Código de verificação da NFS-e. + + + + + + + + + + Tipo complemento do endereço. + + + + + + + + + + Tipo CPF. + + + + + + + + Tipo descrição do evento. + + + + + + + + + + Tipo Discriminação Serviços. + + + + + + + + + + Tipo E-mail. + + + + + + + + + + Tipo Inscrição Estadual. + + + + + + + + Tipo padrão referente a inscrição municipal. + + + + + + + + Endereço. + + + + + + + + + + Tipo número. + + + + + + + + Tipo número do endereço. + + + + + + + + + + Tipo referente às possíveis opções de escolha pelo Simples. + + + + + Não-optante pelo Simples Federal nem Municipal. + + + + + Optante pelo Simples Federal (Alíquota de 1,0%). + + + + + Optante pelo Simples Federal (Alíquota de 0,5%). + + + + + Optante pelo Simples Municipal. + + + + + + + Tipo padrão para quantidades. + + + + + + + + Tipo Razão Social. + + + + + + + + + + Tipo série de documento. + + + + + + + + + + Tipo referente aos possíveis status de NFS-e. + + + + + Normal. + + + + + Cancelada. + + + + + Extraviada. + + + + + + + Tipo que indica se o pedido do serviço obteve sucesso. + + + + + + + Tipo referente ao tempo de processamento do lote. + + + + + + + + Tipo do endereço (Rua, Av, ...). + + + + + + + + + + Tipo referente aos possíveis tipos de RPS. + + + + + Recibo Provisório de Serviços. + + + + + Recibo Provisório de Serviços proveniente de Nota Fiscal Conjugada (Mista). + + + + + Cupom. + + + + + + + Tipo referente aos modos de tributação da NFe. + + + + + + + + + + Tipo UF. + + + + + + + + + + Tipo utilizado para valores com 15 dígitos, sendo 13 de corpo e 2 decimais. + + + + + + + + + + + Tipo Versão do Schema. + + + + + + + + Tipo utilizado para o valor do percentual da carga tributária. + + + + + + + + + + Tipo utilizado para a fonte da carga tributária. + + + + + + + + + + + + + + Código do evento. + + + + + Descrição do enveto. + + + + + Chave para identificação da origem do evento. + + + + Chave do RPS. + + + + + Chave da NFe. + + + + + + + + Tipo que representa um CPF/CNPJ. + + + + + + + + + Tipo que representa a chave de uma NFS-e e a Chave do RPS que a mesma substitui. + + + + + Chave da NFS-e gerada. + + + + + Chave do RPS substituído. + + + + + + + Chave de identificação da NFS-e. + + + + + Inscrição municipal do prestador de serviços. + + + + + Número da NFS-e. + + + + + Código de verificação da NFS-e. + + + + + + + Tipo que define a chave identificadora de um RPS. + + + + + Inscrição municipal do prestador de serviços. + + + + + Série do RPS. + + + + + Número do RPS. + + + + + + + Tipo Endereço. + + + + + + + + + + + + + + + Informações do lote processado. + + + + + Número de lote. 
+ + + + + Inscrição municipal do prestador dos RPS contidos no lote. + + + + + CNPJ do remetente autorizado a transmitir a mensagem XML. + + + + + Data/hora de envio do lote. + + + + + Quantidade de RPS do lote. + + + + + Tempo de processamento do lote. + + + + + Valor total dos serviços dos RPS contidos na mensagem XML. + + + + + Valor total das deduções dos RPS contidos na mensagem XML. + + + + + + + Tipo que representa uma NFS-e + + + + + Assinatura digital da NFS-e. + + + + + Chave de identificação da NFS-e. + + + + + Data de emissão da NFS-e + + + + + Número de lote gerador da NFS-e. + + + + + Chave do RPS que originou a NFS-e. + + + + + Tipo do RPS emitido. + + + + + Data de emissão do RPS que originou a NFS-e. + + + + + CPF/CNPJ do Prestador do serviço. + + + + + Nome/Razão Social do Prestador. + + + + + Endereço do Prestador. + + + + + E-mail do Prestador. + + + + + Status da NFS-e. + + + + + Data de cancelamento da NFS-e. + + + + + Tributação da NFS-e. + + + + + Opção pelo Simples. + + + + + Número da guia vinculada a NFS-e. + + + + + Data de quitação da guia vinculada a NFS-e. + + + + + Valor dos serviços prestados. + + + + + Valor das deduções. + + + + + Valor da retenção do PIS. + + + + + Valor da retenção do COFINS. + + + + + Valor da retenção do INSS. + + + + + Valor da retenção do IR. + + + + + Valor da retenção do CSLL. + + + + + Código do serviço. + + + + + Valor da alíquota. + + + + + Valor do ISS. + + + + + Valor do crédito gerado. + + + + + Retenção do ISS. + + + + + CPF/CNPJ do tomador do serviço. + + + + + Inscrição Municipal do Tomador. + + + + + Inscrição Estadual do tomador. + + + + + Nome/Razão Social do tomador. + + + + + Endereço do tomador. + + + + + E-mail do tomador. + + + + + CNPJ do intermediário de serviço. + + + + + Inscrição Municipal do intermediário de serviço. + + + + + Retenção do ISS pelo intermediário de serviço. + + + + + E-mail do intermediário de serviço. + + + + + Descrição dos serviços. + + + + + Valor da carga tributária total em R$. + + + + + Valor percentual da carga tributária. + + + + + Fonte de informação da carga tributária. + + + + + Código do CEI – Cadastro específico do INSS. + + + + + Código que representa a matrícula da obra no sistema de cadastro de obras. + + + + + Código da cidade do município da prestação do serviço. + + + + + Código que representa o número do encapsulamento. + + + + + Informe o valor total recebido. + + + + + + + Tipo que representa um RPS. + + + + + Assinatura digital do RPS. + + + + + Informe a chave do RPS emitido. + + + + + Informe o Tipo do RPS emitido. + + + + + Informe a Data de emissão do RPS. + + + + + Informe o Status do RPS. + + + + + Informe o tipo de tributação do RPS. + + + + + Informe o valor dos serviços prestados. + + + + + Informe o valor das deduções. + + + + + Informe o valor da retenção do PIS. + + + + + Informe o valor da retenção do COFINS. + + + + + Informe o valor da retenção do INSS. + + + + + Informe o valor da retenção do IR. + + + + + Informe o valor da retenção do CSLL. + + + + + Informe o código do serviço do RPS. Este código deve pertencer à lista de serviços. + + + + + Informe o valor da alíquota. Obs. O conteúdo deste campo será ignorado caso a tributação ocorra no município (Situação do RPS = T ). + + + + + Informe a retenção. + + + + + Informe o CPF/CNPJ do tomador do serviço. O conteúdo deste campo será ignorado caso o campo InscricaoMunicipalTomador esteja preenchido. + + + + + Informe a Inscrição Municipal do Tomador. 
ATENÇÃO: Este campo só deverá ser preenchido para tomadores estabelecidos no município de São Paulo (CCM). Quando este campo for preenchido, seu conteúdo será considerado como prioritário com relação ao campo de CPF/CNPJ do Tomador, sendo utilizado para identificar o Tomador e recuperar seus dados da base de dados da Prefeitura. + + + + + Informe a inscrição estadual do tomador. Este campo será ignorado caso seja fornecido um CPF/CNPJ ou a Inscrição Municipal do tomador pertença ao município de São Paulo. + + + + + Informe o Nome/Razão Social do tomador. Este campo é obrigatório apenas para tomadores Pessoa Jurídica (CNPJ). Este campo será ignorado caso seja fornecido um CPF/CNPJ ou a Inscrição Municipal do tomador pertença ao município de São Paulo. + + + + + Informe o endereço do tomador. Os campos do endereço são obrigatórios apenas para tomadores pessoa jurídica (CNPJ informado). O conteúdo destes campos será ignorado caso seja fornecido um CPF/CNPJ ou a Inscrição Municipal do tomador pertença ao município de São Paulo. + + + + + Informe o e-mail do tomador. + + + + + CNPJ do intermediário de serviço. + + + + + Inscrição Municipal do intermediário de serviço. + + + + + Retenção do ISS pelo intermediário de serviço. + + + + + E-mail do intermediário de serviço. + + + + + Informe a discriminação dos serviços. + + + + + Valor da carga tributária total em R$. + + + + + Valor percentual da carga tributária. + + + + + Fonte de informação da carga tributária. + + + + + Código do CEI – Cadastro específico do INSS. + + + + + Código que representa a matrícula da obra no sistema de cadastro de obras. + + + + + Código da cidade do município da prestação do serviço. + + + + + Código que representa o número do encapsulamento da obra. + + + + + Informe o valor total recebido. 
+ + + + + diff --git a/schemas/paulistana/v02/xmldsig-core-schema_v01.xsd b/schemas/paulistana/v02/xmldsig-core-schema_v01.xsd new file mode 100644 index 0000000..2f090e6 --- /dev/null +++ b/schemas/paulistana/v02/xmldsig-core-schema_v01.xsd @@ -0,0 +1,95 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + From 7404ba1ed6b0bce7a5eae4ee1129574d074d382f Mon Sep 17 00:00:00 2001 From: Luis Malta Date: Wed, 19 Aug 2020 11:15:48 -0300 Subject: [PATCH 2/3] [FIX] Correct variables types --- nfselib/paulistana/v02/TiposNFe_v01.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/nfselib/paulistana/v02/TiposNFe_v01.py b/nfselib/paulistana/v02/TiposNFe_v01.py index 8b2f561..4530516 100644 --- a/nfselib/paulistana/v02/TiposNFe_v01.py +++ b/nfselib/paulistana/v02/TiposNFe_v01.py @@ -363,8 +363,8 @@ def gds_validate_simple_patterns(self, patterns, target): for patterns1 in patterns: found2 = False for patterns2 in patterns1: - mo = re_.search(patterns2, target) - if mo is not None and len(mo.group(0)) == len(target): + mo = re_.search(patterns2, str(target)) + if mo is not None and len(mo.group(0)) == len(str(target)): found2 = True break if not found2: @@ -2659,7 +2659,7 @@ def __init__(self, Assinatura=None, ChaveRPS=None, TipoRPS=None, DataEmissao=Non self.TipoRPS = TipoRPS self.validate_tpTipoRPS(self.TipoRPS) if isinstance(DataEmissao, BaseStrType_): - initvalue_ = datetime_.datetime.strptime(DataEmissao, '%Y-%m-%d').date() + initvalue_ = datetime_.datetime.strptime(DataEmissao, '%Y-%m-%dT%H:%M:%S').date() else: initvalue_ = DataEmissao self.DataEmissao = initvalue_ @@ -2761,6 +2761,7 @@ def validate_tpStatusNFe(self, value): warnings_.warn('Value "%(value)s" does not match xsd enumeration restriction on tpStatusNFe' % {"value" : value.encode("utf-8")} ) def validate_tpTributacaoNFe(self, value): # Validate type tpTributacaoNFe, a restriction on xs:string. + value = str(value) if value is not None and Validate_simpletypes_: if len(value) > 1: warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpTributacaoNFe' % {"value" : value.encode("utf-8")} ) @@ -2775,7 +2776,7 @@ def validate_tpValor(self, value): warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on tpValor' % {"value" : value} ) if not self.gds_validate_simple_patterns( self.validate_tpValor_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (value.encode('utf-8'), self.validate_tpValor_patterns_, )) + warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (str(value).encode('utf-8'), self.validate_tpValor_patterns_, )) validate_tpValor_patterns_ = [['^0|0\\.[0-9]{2}|[1-9]{1}[0-9]{0,12}(\\.[0-9]{0,2})?$']] def validate_tpCodigoServico(self, value): # Validate type tpCodigoServico, a restriction on xs:int. @@ -2835,6 +2836,7 @@ def validate_tpPercentualCargaTributaria(self, value): warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on tpPercentualCargaTributaria' % {"value" : value} ) def validate_tpFonteCargaTributaria(self, value): # Validate type tpFonteCargaTributaria, a restriction on xs:string. 
+ value = str(value) if value is not None and Validate_simpletypes_: if len(value) > 10: warnings_.warn('Value "%(value)s" does not match xsd maxLength restriction on tpFonteCargaTributaria' % {"value" : value.encode("utf-8")} ) From 6465d1a2adc73611c3720105215c6f78eba1f9d1 Mon Sep 17 00:00:00 2001 From: Luis Malta Date: Wed, 26 Aug 2020 12:31:56 -0300 Subject: [PATCH 3/3] [ADD] Namespaces for nota paulistana --- .../paulistana/v02/PedidoEnvioLoteRPS_v01.py | 25 ++++++------ nfselib/paulistana/v02/TiposNFe_v01.py | 39 ++++++++++--------- .../paulistana/v02/generatedsnamespaces.py | 6 +++ 3 files changed, 41 insertions(+), 29 deletions(-) create mode 100644 nfselib/paulistana/v02/generatedsnamespaces.py diff --git a/nfselib/paulistana/v02/PedidoEnvioLoteRPS_v01.py b/nfselib/paulistana/v02/PedidoEnvioLoteRPS_v01.py index 9688e8a..64f52e6 100644 --- a/nfselib/paulistana/v02/PedidoEnvioLoteRPS_v01.py +++ b/nfselib/paulistana/v02/PedidoEnvioLoteRPS_v01.py @@ -107,7 +107,7 @@ def parsexmlstring_(instring, parser=None, **kwargs): # try: - from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ + from nfselib.paulistana.v02.generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ except ImportError: GenerateDSNamespaceDefs_ = {} try: @@ -165,7 +165,10 @@ def gds_validate_integer_list( raise_parse_error(node, 'Requires sequence of integers') return values def gds_format_float(self, input_data, input_name=''): - return ('%.15f' % input_data).rstrip('0') + if input_data.is_integer(): + return int(input_data) + else: + return ('%.15f' % input_data).rstrip('0') def gds_validate_float(self, input_data, node=None, input_name=''): return input_data def gds_format_float_list(self, input_data, input_name=''): @@ -817,7 +820,7 @@ def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', self.Cabecalho.export(outfile, level, namespaceprefix_, namespacedef_='', name_='Cabecalho', pretty_print=pretty_print) for RPS_ in self.RPS: showIndent(outfile, level, pretty_print) - outfile.write('<%sRPS>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(RPS_), input_name='RPS')), namespaceprefix_ , eol_)) + RPS_.export(outfile, level, namespaceprefix_, namespacedef_='', pretty_print=pretty_print) if self.Signature is not None: showIndent(outfile, level, pretty_print) outfile.write('<%sSignature>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.Signature), input_name='Signature')), namespaceprefix_ , eol_)) @@ -858,12 +861,12 @@ def __init__(self, Versao='1', CPFCNPJRemetente=None, transacao=True, dtInicio=N self.CPFCNPJRemetente = CPFCNPJRemetente self.transacao = transacao if isinstance(dtInicio, BaseStrType_): - initvalue_ = datetime_.datetime.strptime(dtInicio, '%Y-%m-%d').date() + initvalue_ = datetime_.datetime.strptime(dtInicio, '%Y-%m-%dT%H:%M:%S').date() else: initvalue_ = dtInicio self.dtInicio = initvalue_ if isinstance(dtFim, BaseStrType_): - initvalue_ = datetime_.datetime.strptime(dtFim, '%Y-%m-%d').date() + initvalue_ = datetime_.datetime.strptime(dtFim, '%Y-%m-%dT%H:%M:%S').date() else: initvalue_ = dtFim self.dtFim = initvalue_ @@ -916,7 +919,7 @@ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='C else: outfile.write('/>%s' % (eol_, )) def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='CabecalhoType'): - if self.Versao != "1" and 'Versao' not in already_processed: + if 'Versao' not in already_processed: 
already_processed.add('Versao') outfile.write(' Versao=%s' % (quote_attrib(self.Versao), )) def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='CabecalhoType', fromsubclass_=False, pretty_print=True): @@ -926,25 +929,25 @@ def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', eol_ = '' if self.CPFCNPJRemetente is not None: showIndent(outfile, level, pretty_print) - outfile.write('<%sCPFCNPJRemetente>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CPFCNPJRemetente), input_name='CPFCNPJRemetente')), namespaceprefix_ , eol_)) + self.CPFCNPJRemetente.export(outfile, level, namespaceprefix_, pretty_print=pretty_print, name_='CPFCNPJRemetente') if not self.transacao: showIndent(outfile, level, pretty_print) outfile.write('<%stransacao>%s%s' % (namespaceprefix_ , self.gds_format_boolean(self.transacao, input_name='transacao'), namespaceprefix_ , eol_)) if self.dtInicio is not None: showIndent(outfile, level, pretty_print) - outfile.write('<%sdtInicio>%s%s' % (namespaceprefix_ , self.gds_format_date(self.dtInicio, input_name='dtInicio'), namespaceprefix_ , eol_)) + outfile.write('<%sdtInicio>%s%s' % (namespaceprefix_ , self.dtInicio, namespaceprefix_ , eol_)) if self.dtFim is not None: showIndent(outfile, level, pretty_print) - outfile.write('<%sdtFim>%s%s' % (namespaceprefix_ , self.gds_format_date(self.dtFim, input_name='dtFim'), namespaceprefix_ , eol_)) + outfile.write('<%sdtFim>%s%s' % (namespaceprefix_ , self.dtFim, namespaceprefix_ , eol_)) if self.QtdRPS is not None: showIndent(outfile, level, pretty_print) outfile.write('<%sQtdRPS>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.QtdRPS), input_name='QtdRPS')), namespaceprefix_ , eol_)) if self.ValorTotalServicos is not None: showIndent(outfile, level, pretty_print) - outfile.write('<%sValorTotalServicos>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ValorTotalServicos), input_name='ValorTotalServicos')), namespaceprefix_ , eol_)) + outfile.write('<%sValorTotalServicos>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_float(self.ValorTotalServicos, input_name='ValorTotalServicos')), namespaceprefix_ , eol_)) if self.ValorTotalDeducoes is not None: showIndent(outfile, level, pretty_print) - outfile.write('<%sValorTotalDeducoes>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.ValorTotalDeducoes), input_name='ValorTotalDeducoes')), namespaceprefix_ , eol_)) + outfile.write('<%sValorTotalDeducoes>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_float(self.ValorTotalDeducoes, )), namespaceprefix_ , eol_)) def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) diff --git a/nfselib/paulistana/v02/TiposNFe_v01.py b/nfselib/paulistana/v02/TiposNFe_v01.py index 4530516..aa00c99 100644 --- a/nfselib/paulistana/v02/TiposNFe_v01.py +++ b/nfselib/paulistana/v02/TiposNFe_v01.py @@ -107,7 +107,7 @@ def parsexmlstring_(instring, parser=None, **kwargs): # try: - from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ + from nfselib.paulistana.v02.generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_ except ImportError: GenerateDSNamespaceDefs_ = {} try: @@ -146,7 +146,7 @@ def gds_validate_string(self, input_data, node=None, input_name=''): else: return input_data def gds_format_base64(self, input_data, input_name=''): - return 
base64.b64encode(input_data) + return base64.b64encode(input_data).decode('ascii') def gds_validate_base64(self, input_data, node=None, input_name=''): return input_data def gds_format_integer(self, input_data, input_name=''): @@ -165,7 +165,10 @@ def gds_validate_integer_list( raise_parse_error(node, 'Requires sequence of integers') return values def gds_format_float(self, input_data, input_name=''): - return ('%.15f' % input_data).rstrip('0') + if input_data.is_integer(): + return int(input_data) + else: + return ('%.15f' % input_data).rstrip('0') def gds_validate_float(self, input_data, node=None, input_name=''): return input_data def gds_format_float_list(self, input_data, input_name=''): @@ -968,10 +971,10 @@ def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', eol_ = '' if self.CPF is not None: showIndent(outfile, level, pretty_print) - outfile.write('<%sCPF>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CPF), input_name='CPF')), namespaceprefix_ , eol_)) + outfile.write('<%sCPF>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(self.CPF, input_name='CPF')), namespaceprefix_ , eol_)) if self.CNPJ is not None: showIndent(outfile, level, pretty_print) - outfile.write('<%sCNPJ>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.CNPJ), input_name='CNPJ')), namespaceprefix_ , eol_)) + outfile.write('<%sCNPJ>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(self.CNPJ, input_name='CNPJ')), namespaceprefix_ , eol_)) def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -1295,13 +1298,13 @@ def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', eol_ = '' if self.InscricaoPrestador is not None: showIndent(outfile, level, pretty_print) - outfile.write('<%sInscricaoPrestador>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.InscricaoPrestador, input_name='InscricaoPrestador'), namespaceprefix_ , eol_)) + outfile.write('<%sInscricaoPrestador>%s%s' % (namespaceprefix_ , self.gds_format_string(self.InscricaoPrestador, input_name='InscricaoPrestador'), namespaceprefix_ , eol_)) if self.SerieRPS is not None: showIndent(outfile, level, pretty_print) outfile.write('<%sSerieRPS>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.SerieRPS), input_name='SerieRPS')), namespaceprefix_ , eol_)) if self.NumeroRPS is not None: showIndent(outfile, level, pretty_print) - outfile.write('<%sNumeroRPS>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.NumeroRPS, input_name='NumeroRPS'), namespaceprefix_ , eol_)) + outfile.write('<%sNumeroRPS>%s%s' % (namespaceprefix_ , self.gds_format_string(self.NumeroRPS, input_name='NumeroRPS'), namespaceprefix_ , eol_)) def build(self, node): already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -1496,7 +1499,7 @@ def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', outfile.write('<%sUF>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.UF), input_name='UF')), namespaceprefix_ , eol_)) if self.CEP is not None: showIndent(outfile, level, pretty_print) - outfile.write('<%sCEP>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.CEP, input_name='CEP'), namespaceprefix_ , eol_)) + outfile.write('<%sCEP>%s%s' % (namespaceprefix_ , self.gds_format_string(self.CEP, input_name='CEP'), namespaceprefix_ , eol_)) def build(self, node): 
already_processed = set() self.buildAttributes(node, node.attrib, already_processed) @@ -2776,7 +2779,7 @@ def validate_tpValor(self, value): warnings_.warn('Value "%(value)s" does not match xsd maxInclusive restriction on tpValor' % {"value" : value} ) if not self.gds_validate_simple_patterns( self.validate_tpValor_patterns_, value): - warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (str(value).encode('utf-8'), self.validate_tpValor_patterns_, )) + warnings_.warn('Value "%s" does not match xsd pattern restrictions: %s' % (str(value), self.validate_tpValor_patterns_, )) validate_tpValor_patterns_ = [['^0|0\\.[0-9]{2}|[1-9]{1}[0-9]{0,12}(\\.[0-9]{0,2})?$']] def validate_tpCodigoServico(self, value): # Validate type tpCodigoServico, a restriction on xs:int. @@ -2897,7 +2900,7 @@ def hasContent_(self): return True else: return False - def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='tpRPS', pretty_print=True): + def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='RPS', pretty_print=True): imported_ns_def_ = GenerateDSNamespaceDefs_.get('tpRPS') if imported_ns_def_ is not None: namespacedef_ = imported_ns_def_ @@ -2942,30 +2945,30 @@ def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', if self.TributacaoRPS is not None: showIndent(outfile, level, pretty_print) outfile.write('<%sTributacaoRPS>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.TributacaoRPS), input_name='TributacaoRPS')), namespaceprefix_ , eol_)) - if self.ValorServicos is not None: + if self.ValorServicos is not None and self.ValorServicos != 0: showIndent(outfile, level, pretty_print) outfile.write('<%sValorServicos>%s%s' % (namespaceprefix_ , self.gds_format_float(self.ValorServicos, input_name='ValorServicos'), namespaceprefix_ , eol_)) if self.ValorDeducoes is not None: showIndent(outfile, level, pretty_print) outfile.write('<%sValorDeducoes>%s%s' % (namespaceprefix_ , self.gds_format_float(self.ValorDeducoes, input_name='ValorDeducoes'), namespaceprefix_ , eol_)) - if self.ValorPIS is not None: + if self.ValorPIS is not None and self.ValorPIS != 0: showIndent(outfile, level, pretty_print) outfile.write('<%sValorPIS>%s%s' % (namespaceprefix_ , self.gds_format_float(self.ValorPIS, input_name='ValorPIS'), namespaceprefix_ , eol_)) - if self.ValorCOFINS is not None: + if self.ValorCOFINS is not None and self.ValorCOFINS != 0: showIndent(outfile, level, pretty_print) outfile.write('<%sValorCOFINS>%s%s' % (namespaceprefix_ , self.gds_format_float(self.ValorCOFINS, input_name='ValorCOFINS'), namespaceprefix_ , eol_)) - if self.ValorINSS is not None: + if self.ValorINSS is not None and self.ValorINSS != 0: showIndent(outfile, level, pretty_print) outfile.write('<%sValorINSS>%s%s' % (namespaceprefix_ , self.gds_format_float(self.ValorINSS, input_name='ValorINSS'), namespaceprefix_ , eol_)) - if self.ValorIR is not None: + if self.ValorIR is not None and self.ValorIR != 0: showIndent(outfile, level, pretty_print) outfile.write('<%sValorIR>%s%s' % (namespaceprefix_ , self.gds_format_float(self.ValorIR, input_name='ValorIR'), namespaceprefix_ , eol_)) - if self.ValorCSLL is not None: + if self.ValorCSLL is not None and self.ValorCSLL != 0: showIndent(outfile, level, pretty_print) outfile.write('<%sValorCSLL>%s%s' % (namespaceprefix_ , self.gds_format_float(self.ValorCSLL, input_name='ValorCSLL'), namespaceprefix_ , eol_)) if self.CodigoServico is not None: showIndent(outfile, level, 
pretty_print) - outfile.write('<%sCodigoServico>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.CodigoServico, input_name='CodigoServico'), namespaceprefix_ , eol_)) + outfile.write('<%sCodigoServico>%s%s' % (namespaceprefix_ , self.gds_format_string(self.CodigoServico, input_name='CodigoServico'), namespaceprefix_ , eol_)) if self.AliquotaServicos is not None: showIndent(outfile, level, pretty_print) outfile.write('<%sAliquotaServicos>%s%s' % (namespaceprefix_ , self.gds_format_float(self.AliquotaServicos, input_name='AliquotaServicos'), namespaceprefix_ , eol_)) @@ -2979,7 +2982,7 @@ def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', outfile.write('<%sInscricaoMunicipalTomador>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.InscricaoMunicipalTomador, input_name='InscricaoMunicipalTomador'), namespaceprefix_ , eol_)) if self.InscricaoEstadualTomador is not None: showIndent(outfile, level, pretty_print) - outfile.write('<%sInscricaoEstadualTomador>%s%s' % (namespaceprefix_ , self.gds_format_integer(self.InscricaoEstadualTomador, input_name='InscricaoEstadualTomador'), namespaceprefix_ , eol_)) + outfile.write('<%sInscricaoEstadualTomador>%s%s' % (namespaceprefix_ , self.gds_format_string(self.InscricaoEstadualTomador, input_name='InscricaoEstadualTomador'), namespaceprefix_ , eol_)) if self.RazaoSocialTomador is not None: showIndent(outfile, level, pretty_print) outfile.write('<%sRazaoSocialTomador>%s%s' % (namespaceprefix_ , self.gds_encode(self.gds_format_string(quote_xml(self.RazaoSocialTomador), input_name='RazaoSocialTomador')), namespaceprefix_ , eol_)) diff --git a/nfselib/paulistana/v02/generatedsnamespaces.py b/nfselib/paulistana/v02/generatedsnamespaces.py new file mode 100644 index 0000000..4407a00 --- /dev/null +++ b/nfselib/paulistana/v02/generatedsnamespaces.py @@ -0,0 +1,6 @@ + +GenerateDSNamespaceDefs = { + "PedidoEnvioLoteRPS": 'xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns="http://www.prefeitura.sp.gov.br/nfe"', + "CabecalhoType": 'xmlns=""', + "tpRPS": 'xmlns=""', +}
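
Note (illustrative, not part of the patches): the three commits only come together at export time. PATCH 3/3 points the generated modules at nfselib.paulistana.v02.generatedsnamespaces, so export() can look the class name up in GenerateDSNamespaceDefs and stamp the Prefeitura namespace on the PedidoEnvioLoteRPS root while forcing xmlns="" onto Cabecalho and RPS. The sketch below shows how a caller would exercise that path. It is a minimal sketch under assumptions: the usual generateDS keyword constructors are assumed; PedidoEnvioLoteRPS, CabecalhoType and the Cabecalho/RPS/CPFCNPJRemetente/dtInicio/dtFim/QtdRPS/ValorTotalServicos/ValorTotalDeducoes names are taken from the hunks above; the tpCPFCNPJ class name is inferred from the XSD type of the same name; all values are placeholders, and the RPS list and digital signature are left empty.

    # Illustrative sketch only -- exercises the namespace lookup added in PATCH 3/3.
    # Class and field names come from the hunks above; tpCPFCNPJ is inferred from
    # the TiposNFe_v01.xsd type name and every value is a dummy placeholder.
    import datetime
    import io

    from nfselib.paulistana.v02.PedidoEnvioLoteRPS_v01 import (
        CabecalhoType,
        PedidoEnvioLoteRPS,
        tpCPFCNPJ,
    )

    cabecalho = CabecalhoType(
        Versao='1',
        CPFCNPJRemetente=tpCPFCNPJ(CNPJ='99999999000191'),  # placeholder CNPJ
        transacao=True,
        # date objects bypass the strptime branch whose format PATCH 3/3 changed
        dtInicio=datetime.date(2020, 8, 1),
        dtFim=datetime.date(2020, 8, 31),
        QtdRPS=0,
        ValorTotalServicos=0.0,  # serialized via gds_format_float after PATCH 3/3
        ValorTotalDeducoes=0.0,
    )

    # RPS list and Signature intentionally left empty for the sketch.
    pedido = PedidoEnvioLoteRPS(Cabecalho=cabecalho, RPS=[])

    buf = io.StringIO()
    # export() consults GenerateDSNamespaceDefs_['PedidoEnvioLoteRPS'] for the root
    # xmlns declarations; CabecalhoType and tpRPS resolve to xmlns="".
    pedido.export(buf, 0, pretty_print=True)
    print(buf.getvalue())

The design choice this illustrates: keeping the namespace strings in a single generatedsnamespaces.py mapping (rather than hard-coding them in each generated module) lets the root element carry xmlns="http://www.prefeitura.sp.gov.br/nfe" while child types are reset to the empty namespace, which is what the Prefeitura web service expects.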