Commit 515c10eb authored by iregon's avatar iregon
Browse files

unified relative imports

parent b6648218
# Following to access the subpackages main modules (or/and functions) directly without loops through the full subpackage path # Following to access the subpackages main modules (or/and functions) directly without loops through the full subpackage path
from .schemas import schemas as schemas from .schemas import code_tables
from .schemas import code_tables as code_tables from .schemas import schemas
from .tests import tests as tests
from .read import read as read from .read import read as read
__version__ = '1.1' __version__ = '1.1'
from __future__ import division
from __future__ import print_function
from __future__ import absolute_import
from mdf_reader import properties
import numpy as np
import pandas as pd import pandas as pd
from .. import properties
# 1. dtype must be defined in dtype_properties.data_types # 1. dtype must be defined in dtype_properties.data_types
#>>> if not np.dtype('int8'): #>>> if not np.dtype('int8'):
#... print('No data type') #... print('No data type')
......
from __future__ import division
from __future__ import print_function
from __future__ import absolute_import
import numpy as np import numpy as np
import pandas as pd
import string import string
import logging
from mdf_reader import properties from .. import properties
#for importer, modname, ispkg in pkgutil.walk_packages(path=package.__path__,prefix=package.__name__+'.',onerror=lambda x: None): #for importer, modname, ispkg in pkgutil.walk_packages(path=package.__path__,prefix=package.__name__+'.',onerror=lambda x: None):
......
from __future__ import division
from __future__ import print_function
from __future__ import absolute_import
import pandas as pd import pandas as pd
def df_prepend_datetime(df,date_columns,date_format,date_name = "_datetime"): def df_prepend_datetime(df,date_columns,date_format,date_name = "_datetime"):
......
...@@ -9,7 +9,7 @@ Assumes we are never writing a header! ...@@ -9,7 +9,7 @@ Assumes we are never writing a header!
""" """
import pandas as pd import pandas as pd
import mdf_reader.common.logging_hdlr as logging_hdlr from . import logging_hdlr
logger = logging_hdlr.init_logger(__name__,level = 'DEBUG') logger = logging_hdlr.init_logger(__name__,level = 'DEBUG')
...@@ -18,7 +18,7 @@ def restore(TextParser_ref,TextParser_options): ...@@ -18,7 +18,7 @@ def restore(TextParser_ref,TextParser_options):
TextParser_ref.seek(0) TextParser_ref.seek(0)
TextParser = pd.read_csv( TextParser_ref, names = TextParser_options['names'],chunksize = TextParser_options['chunksize'], dtype = TextParser_options['dtype']) #, skiprows = options['skiprows']) TextParser = pd.read_csv( TextParser_ref, names = TextParser_options['names'],chunksize = TextParser_options['chunksize'], dtype = TextParser_options['dtype']) #, skiprows = options['skiprows'])
return TextParser return TextParser
except Exception as e: except:
logger.error('Failed to restore TextParser', exc_info=True) logger.error('Failed to restore TextParser', exc_info=True)
return TextParser return TextParser
...@@ -26,7 +26,7 @@ def is_not_empty(TextParser): ...@@ -26,7 +26,7 @@ def is_not_empty(TextParser):
try: try:
TextParser_ref = TextParser.f TextParser_ref = TextParser.f
TextParser_options = TextParser.orig_options TextParser_options = TextParser.orig_options
except Exception as e: except:
logger.error('Failed to process input. Input type is {}'.format(type(TextParser)), exc_info=True) logger.error('Failed to process input. Input type is {}'.format(type(TextParser)), exc_info=True)
return return
try: try:
......
...@@ -14,11 +14,12 @@ if ipython and want to get the interactive plots! ...@@ -14,11 +14,12 @@ if ipython and want to get the interactive plots!
@author: iregon @author: iregon
""" """
import mdf_reader
import pandas as pd import pandas as pd
import matplotlib.pyplot as plt import matplotlib.pyplot as plt
import numpy as np import numpy as np
from .. import properties
#------------------------------------------------------------------------------ #------------------------------------------------------------------------------
def plot_numeric_validation(data,mask,element,valid_max,valid_min, units): def plot_numeric_validation(data,mask,element,valid_max,valid_min, units):
...@@ -87,7 +88,7 @@ def plot_model_validation(imodel): ...@@ -87,7 +88,7 @@ def plot_model_validation(imodel):
for element in imodel['atts'].keys(): for element in imodel['atts'].keys():
title_element = element if not isinstance(element,tuple) else element[1] + " (" + element[0] + ")" title_element = element if not isinstance(element,tuple) else element[1] + " (" + element[0] + ")"
dtype = imodel['atts'].get(element).get('column_type') dtype = imodel['atts'].get(element).get('column_type')
if dtype in mdf_reader.properties.numeric_types: if dtype in properties.numeric_types:
valid_max = imodel['atts'].get(element).get('valid_max') valid_max = imodel['atts'].get(element).get('valid_max')
valid_min = imodel['atts'].get(element).get('valid_min') valid_min = imodel['atts'].get(element).get('valid_min')
units = imodel['atts'].get(element).get('units') units = imodel['atts'].get(element).get('units')
......
...@@ -22,14 +22,13 @@ import json ...@@ -22,14 +22,13 @@ import json
import copy import copy
from io import StringIO as StringIO from io import StringIO as StringIO
import mdf_reader.schemas as schemas from . import schemas
import mdf_reader.properties as properties from . import properties
import mdf_reader.common.pandas_TextParser_hdlr as pandas_TextParser_hdlr from .common import pandas_TextParser_hdlr
from .reader import import_data
from mdf_reader.reader import import_data from .reader import get_sections
from mdf_reader.reader import get_sections from .reader import read_sections
from mdf_reader.reader import read_sections from .validate import validate
from mdf_reader.validate import validate
toolPath = os.path.dirname(os.path.abspath(__file__)) toolPath = os.path.dirname(os.path.abspath(__file__))
schema_lib = os.path.join(toolPath,'schemas','lib') schema_lib = os.path.join(toolPath,'schemas','lib')
...@@ -147,7 +146,6 @@ def read(source, data_model = None, data_model_path = None, sections = None,chun ...@@ -147,7 +146,6 @@ def read(source, data_model = None, data_model_path = None, sections = None,chun
# a list with a single dataframe or a pd.io.parsers.TextFileReader # a list with a single dataframe or a pd.io.parsers.TextFileReader
logging.info("Getting data string from source...") logging.info("Getting data string from source...")
TextParser = import_data.import_data(source, chunksize = chunksize, skiprows = skiprows) TextParser = import_data.import_data(source, chunksize = chunksize, skiprows = skiprows)
print(type(TextParser))
# 2.3. Extract, read and validate data in same loop # 2.3. Extract, read and validate data in same loop
logging.info("Extracting and reading sections") logging.info("Extracting and reading sections")
......
...@@ -47,10 +47,9 @@ import pandas as pd ...@@ -47,10 +47,9 @@ import pandas as pd
import os import os
import io import io
from mdf_reader import properties from .. import properties
def to_iterable_df(source,skiprows = None, chunksize = None): def to_iterable_df(source,skiprows = None, chunksize = None):
print('chunksize is {}'.format(str(chunksize)))
TextParser = pd.read_fwf(source,widths=[properties.MAX_FULL_REPORT_WIDTH],header = None, delimiter="\t", skiprows = skiprows, chunksize = chunksize) TextParser = pd.read_fwf(source,widths=[properties.MAX_FULL_REPORT_WIDTH],header = None, delimiter="\t", skiprows = skiprows, chunksize = chunksize)
if not chunksize: if not chunksize:
TextParser = [TextParser] TextParser = [TextParser]
......
...@@ -35,9 +35,10 @@ DEV NOTES: ...@@ -35,9 +35,10 @@ DEV NOTES:
import pandas as pd import pandas as pd
from io import StringIO as StringIO from io import StringIO as StringIO
import mdf_reader.properties as properties
from mdf_reader.common.converters import converters from .. import properties
from mdf_reader.common.decoders import decoders from ..common.converters import converters
from ..common.decoders import decoders
def extract_fixed_width(section_serie_bf,section_schema): def extract_fixed_width(section_serie_bf,section_schema):
# Read section elements descriptors # Read section elements descriptors
......
...@@ -3,14 +3,10 @@ ...@@ -3,14 +3,10 @@
""" """
Created on Thu Sep 13 15:14:51 2018 Created on Thu Sep 13 15:14:51 2018
""" """
from __future__ import print_function
from __future__ import absolute_import
# Import required libraries
import sys import sys
import json import json
import datetime import datetime
import copy
import numpy as np import numpy as np
import pandas as pd import pandas as pd
import os import os
......
{
"0":"Increasing, then decreasing; atmospheric pressure the same or higher than three hours ago",
"1":"Increasing, then steady; or increasing, then increasing more slowly - Atmospheric pressure now higher than three hours ago",
"2":"Increasing (steadily or unsteadily) - Atmospheric pressure now higher than three hours ago",
"3":"Decreasing or steady, then increasing; or increasing, then increasing more rapidly - Atmospheric pressure now higher than three hours ago",
"4":"Steady; atmospheric pressure the same as three hours ago",
"5":"Decreasing, then increasing; atmospheric pressure the same or lower than three hours ago",
"6":"Decreasing, then steady; or decreasing, then decreasing more slowly - Atmospheric pressure now lower than three hours ago",
"7":"Decreasing (steadily or unsteadily) - Atmospheric pressure now lower than three hours ago",
"8":"Steady or increasing, then decreasing; or decreasing, then decreasing more rapidly - Atmospheric pressure now lower than three hours ago"
}
{
"0":"Netherlands",
"1":"Norway",
"2":"US",
"3":"UK",
"4":"France",
"5":"Denmark",
"6":"Italy",
"7":"India",
"8":"Hong Kong",
"9":"New Zealand",
"00":"Netherlands",
"01":"Norway",
"02":"US",
"03":"UK",
"04":"France",
"05":"Denmark",
"06":"Italy",
"07":"India",
"08":"Hong Kong",
"09":"New Zealand",
"10":"Ireland",
"11":"Philippines",
"12":"Egypt",
"13":"Canada",
"14":"Belgium",
"15":"South Africa",
"16":"Australia",
"17":"Japan",
"18":"Pakistan",
"19":"Argentina",
"20":"Sweden",
"21":"Federal Republic of Germany",
"22":"Iceland",
"23":"Israel",
"24":"Malaysia",
"25":"USSR",
"26":"Finland",
"27":"Rep. of Korea",
"28":"New Caledonia",
"29":"Portugal",
"30":"Spain",
"31":"Thailand",
"32":"Yugoslavia",
"33":"Poland",
"34":"Brazil",
"35":"Singapore",
"36":"Kenya",
"37":"Tanzania",
"38":"Uganda",
"39":"Mexico",
"40":"German Democratic Republic",
"AF":"Afghanistan",
"AL":"Albania",
"DZ":"Algeria",
"AD":"Andorra",
"AO":"Angola",
"AG":"Antigua and Barbuda",
"AR":"Argentina",
"AM":"Armenia",
"AW":"Aruba",
"AU":"Australia",
"AT":"Austria",
"AZ":"Azerbaijan",
"BS":"Bahamas",
"BH":"Bahrain",
"BD":"Bangladesh",
"BB":"Barbados",
"BY":"Belarus",
"BE":"Belgium",
"BZ":"Belize",
"BJ":"Benin",
"BT":"Bhutan",
"BO":"Bolivia",
"BA":"Bosnia and Herzegovina",
"BW":"Botswana",
"BR":"Brazil",
"BN":"Brunei Darussalam",
"BG":"Bulgaria",
"BF":"Burkina Faso",
"BI":"Burundi",
"KH":"Cambodia",
"CM":"Cameroon",
"CA":"Canada",
"CV":"Cape Verde",
"CF":"Central African Republic",
"TD":"Chad",
"CL":"Chile",
"CN":"China",
"CO":"Colombia",
"KM":"Comoros",
"CG":"Congo",
"CD":"The Democratic Republic of the Congo",
"CR":"Costa Rica",
"CI":"Cote d'Ivoire",
"HR":"Croatia",
"CU":"Cuba",
"CY":"Cyprus",
"CZ":"Czech Republic",
"DK":"Denmark",
"DJ":"Djibouti",
"DM":"Dominica",
"DO":"Dominican Republic",
"EC":"Ecuador",
"EG":"Egypt",
"SV":"El Salvador",
"GQ":"Equatorial Guinea",
"ER":"Eritrea",
"EE":"Estonia",
"ET":"Ethiopia",
"FJ":"Fiji",
"FI":"Finland",
"FR":"France",
"GA":"Gabon",
"GM":"Gambia",
"GE":"Georgia",
"DE":"Germany",
"GH":"Ghana",
"GR":"Greece",
"GD":"Grenada",
"GT":"Guatemala",
"GN":"Guinea",
"GW":"Guinea Bissau",
"GY":"Guyana",
"HT":"Haiti",
"HN":"Honduras",
"HK":"Hong Kong",
"HU":"Hungary",
"IS":"Iceland",
"IN":"India",
"ID":"Indonesia",
"IR":"Islamic Republic of Iran",
"IQ":"Iraq",
"IE":"Ireland",
"IL":"Israel",
"IT":"Italy",
"JM":"Jamaica",
"JP":"Japan",
"JO":"Jordan",
"KZ":"Kazakhstan",
"KE":"Kenya",
"KI":"Kiribati",
"KR":"Republic of Korea",
"KW":"Kuwait",
"KG":"Kyrgyzstan",
"LA":"Lao People's Democratic Republic",
"LV":"Latvia",
"LB":"Lebanon",
"LS":"Lesotho",
"LR":"Liberia",
"LY":"Libyan Arab Jamahiriya",
"LT":"Lithuania",
"LU":"Luxembourg",
"MK":"The Former Yugoslav Republic of Macedonia",
"MG":"Madagascar",
"MW":"Malawi",
"MY":"Malaysia",
"MV":"Maldives",
"ML":"Mali",
"MT":"Malta",
"MH":"Marshall Islands",
"MR":"Mauritania",
"MU":"Mauritius",
"MX":"Mexico",
"FM":"Federated States of Micronesia",
"MD":"Republic of Moldova",
"MC":"Monaco",
"MN":"Mongolia",
"MA":"Morocco",
"MZ":"Mozambique",
"MM":"Myanmar",
"NA":"Namibia",
"NR":"Nauru",
"NP":"Nepal",
"NL":"Netherlands",
"AN":"Netherlands Antilles",
"NZ":"New Zealand",
"NI":"Nicaragua",
"NE":"Niger",
"NG":"Nigeria",
"KP":"Democratic People's Republic of Korea",
"NO":"Norway",
"OM":"Oman",
"PK":"Pakistan",
"PW":"Palau",
"PS":"Occupied Palestinian Territory",
"PA":"Panama",
"PG":"Papua New Guinea",
"PY":"Paraguay",
"PE":"Peru",
"PH":"Philippines",
"PL":"Poland",
"PT":"Portugal",
"QA":"Qatar",
"RO":"Romania",
"RU":"Russian Federation",
"RW":"Rwanda",
"KN":"Saint Kitts and Nevis",
"LC":"Saint Lucia",
"VC":"Saint Vincent and the Grenadines",
"WS":"Samoa",
"SM":"San Marino",
"ST":"Sao Tome And Principe",
"SA":"Saudi Arabia",
"SN":"Senegal",
"CS":"Serbia and Montenegro",
"SC":"Seychelles",
"SL":"Sierra Leone",
"SG":"Singapore",
"SK":"Slovakia",
"SI":"Slovenia",
"SB":"Solomon Islands",
"SO":"Somalia",
"ZA":"South Africa",
"ES":"Spain",
"LK":"Sri Lanka",
"SD":"Sudan",
"SR":"Surinam",
"SZ":"Swaziland",
"SE":"Sweden",
"CH":"Switzerland",
"SY":"Syrian Arab Republic",
"TJ":"Tajikistan",
"TZ":"United Republic of Tanzania",
"TH":"Thailand",
"TL":"Timor - Leste",
"TG":"Togo",
"TO":"Tonga",
"TT":"Trinidad and Tobago",
"TN":"Tunisia",
"TR":"Turkey",
"TM":"Turkmenistan",
"TV":"Tuvalu",
"UG":"Uganda",
"UA":"Ukraine",
"AE":"United Arab Emirates",
"GB":"United Kingdom",
"US":"United States",
"UY":"Uruguay",
"UZ":"Uzbekistan",
"VU":"Vanuatu",
"VA":"Vatican City",
"VE":"Venezuela",
"VN":"Viet Nam",
"YE":"Yemen",
"ZM":"Zambia",
"ZW":"Zimbabwe",
"DD":"East Germany",
"CS":"Serbia and Montenegro",
"RU":"Soviet Union",
"NC":"New Caledonia",
"ZY":"None (self recruited)",
"ZZ":"None (third party support)",
"TW":"Taiwan (Province of China)",
"SU":"Soviet Union",
"YU":"Yugoslavia",
"XX":"Multiple recruitment",
"EU":"European Union"
}
{
"0":"No Cirrus, Cirrocumulus or Cirrostratus",
"1":"Cirrus in the form of filaments, strands or hooks, not progressively invading the sky",
"2":"Dense Cirrus, in patches or entangled sheaves, which usually do not increase and sometimes seem to be the remains of the upper part of a Cumulonimbus, or Cirrus with sproutings in the form of small turrets or battlements, or Cirrus having the appearance of cumuliform tufts",
"3":"Dense Cirrus, often in the form of an anvil, being the remains of the upper parts of Cumulonimbus",
"4":"Cirrus in the form of hooks or of filaments, or both, progressively invading the sky; they generally become denser as a whole",
"5":"Cirrus (often in bands converging towards one point or two opposite points of the horizon) and Cirrostratus, or Cirrostratus alone; in either case, they are progressively invading the sky, and generally growing denser as a whole, but the continuous veil does not reach 45 degrees above the horizon.",
"6":"Cirrus (often in bands converging towards one point or two opposite points of the horizon) and Cirrostratus, or Cirrostratus alone; in either case, they are progressively invading the sky, and generally growing denser as a whole; the continuous veil extends more than 45 degrees above the horizon, without the sky being totally covered",
"7":"Veil of Cirrostratus covering the celestial dome",
"8":"Cirrostratus not progressively invading the sky and not completely covering the celestial dome",
"9":"Cirrocumulus alone, or Cirrocumulus accompanied by Cirrus or Cirrostratus, or both, but Cirrocumulus is predominant",
"10":"Cirrus, Cirrocumulus and Cirrostratus invisible owing to darkness, fog, blowing dust or sand, or other similar phenomena, or more often because of the presence of a continuous layer of lower clouds"
}
{
"0":"No Cirrus, Cirrocumulus or Cirrostratus",
"1":"Cirrus in the form of filaments, strands or hooks, not progressively invading the sky",
"2":"Dense Cirrus, in patches or entangled sheaves, which usually do not increase and sometimes seem to be the remains of the upper part of a Cumulonimbus, or Cirrus with sproutings in the form of small turrets or battlements, or Cirrus having the appearance of cumuliform tufts",
"3":"Dense Cirrus, often in the form of an anvil, being the remains of the upper parts of Cumulonimbus",
"4":"Cirrus in the form of hooks or of filaments, or both, progressively invading the sky; they generally become denser as a whole",
"5":"Cirrus (often in bands converging towards one point or two opposite points of the horizon) and Cirrostratus, or Cirrostratus alone; in either case, they are progressively invading the sky, and generally growing denser as a whole, but the continuous veil does not reach 45 degrees above the horizon.",
"6":"Cirrus (often in bands converging towards one point or two opposite points of the horizon) and Cirrostratus, or Cirrostratus alone; in either case, they are progressively invading the sky, and generally growing denser as a whole; the continuous veil extends more than 45 degrees above the horizon, without the sky being totally covered",
"7":"Veil of Cirrostratus covering the celestial dome",
"8":"Cirrostratus not progressively invading the sky and not completely covering the celestial dome",
"9":"Cirrocumulus alone, or Cirrocumulus accompanied by Cirrus or Cirrostratus, or both, but Cirrocumulus is predominant",
"10":"Cirrus, Cirrocumulus and Cirrostratus invisible owing to darkness, fog, blowing dust or sand, or other similar phenomena, or more often because of the presence of a continuous layer of lower clouds"
}
{
"0":"No Altocumulus, Altostratus or Nimbostratus",
"1":"Altostratus, the greater part of which is semitransparent; through this part the sun or moon may be weakly visible, as through ground glass",
"2":"Altostratus, the greater part of which is sufficiently dense to hide the sun or moon, or Nimbostratus",
"3":"Altocumulus, the greater part of which is semitransparent; the various elements of the cloud change only slowly and are all at a single level",
"4":"Patches (often in the form of almonds or fish) of Altocumulus, the greater part of which is semi-transparent; the clouds occur at one or more levels and the elements are continually changing in appearance",
"5":"Altocumulus clouds generally thicken as a whole; Semi-transparent Altocumulus in bands, or Altocumulus, in one or more fairly continuous layer (semi-transparent or opaque), progressively invading the sky; these Altocumulus clouds generally thicken as a whole",
"6":"Altocumulus resulting from the spreading out of Cumulus (or Cumulonimbus)",
"7":"Altocumulus in two or more layers, usually opaque in places, and not progressively invading the sky; or opaque layer of Altocumulus, not progressively invading the sky; or Altocumulus together with Altostratus or Nimbostratus",
"8":"Altocumulus with sproutings in the form of small towers or battlements, or Altocumulus having the appearance of cumuliform tufts",
"9":"Altocumulus of a chaotic sky, generally at several levels",
"10":"Altocumulus, Altostratus and Nimbostratus invisible owing to darkness, fog, blowing dust or sand, or other similar phenomena, or more often because of the presence of a continuous layer of lower clouds"
}
{
"0":"36-point compass",
"1":"32-point compass",
"2":"16 of 36-point compass",
"3":"16 of 32-point compass",
"4":"8-point compass",
"5":"360-point compass",
"6":"high resolution data (e.g., tenths of degrees)"
}
{
"0":"measured",
"1":"computed",
"2":"iced measured",
"3":"iced computed"
}
{
"0":"0",
"1":"45",
"2":"90",
"3":"135",
"4":"180",
"5":"225",
"6":"270",
"7":"315",
"8":"360",
"9":"NULL"
}
{
"0":"0",
"1":"50",
"2":"100",
"3":"200",
"4":"300",
"5":"600",
"6":"1000",
"7":"1500",
"8":"2000",
"9":"2500",
"10":"NULL"
}
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment