# -*- coding: utf-8 -*- """report_objects module """ import json import matplotlib import re from gluon import current from gluon.storage import Storage from pandas import DataFrame, MultiIndex, to_datetime from plugin_dbui import get_id, Store from pydal.helpers.methods import smart_query from pydal.objects import FieldVirtual from selector import EvtSelector from StringIO import StringIO MSG_NO_DATAINDEX = "The property dataIndex is required when eval is used." MSG_NO_EVT_ID = "Identifier of the event is not defined." MSG_NO_XTYPE = "The property xtype is missing." REG_DBFIELD = re.compile("\w+\.\w+(?:\.\w+)?", re.UNICODE) REG_EVT_ID = re.compile("history\.id_events *={1,2} *(\d+)") REG_PYQUERY = re.compile("[\( ]*\w+\.\w+\.\w+") REG_SINGLE_DBFIELD = re.compile("^ *\w+\.\w+(\.\w+)? *$", re.UNICODE) def do_title(report): """Build the report title. Args: report (BaseReport): the report Returns: str: """ db = report.db config = report.config selector = report.selector T = current.T # from the configuration title = (config.title if config.title else config.name) # add meta data metadata = [] if selector.id_teams: metadata.append(db.teams[selector.id_teams].team) if selector.id_projects: metadata.append(db.projects[selector.id_projects].project) if selector.category: metadata.append(selector.category) if selector.id_people_categories: code = db.people_categories[selector.id_people_categories].code metadata.append(code) # add period if selector.year_start and not selector.year_end: metadata.append(str(selector.year_start)) elif selector.year_start and selector.year_end: years = (str(selector.year_start), str(selector.year_end)) metadata.append(T("from %s to %s", lazy=False) % years) return "%s: %s" % (title, " / ".join(metadata)) def get_value(row, tablename, fieldname, keyname="", **kwargs): """Helper function returning the value of a database field. The method is designed to handle standard and JSON-type database field. The field is identified by its ``tablename``, ``fieldname`` and ``keyname``. Args: row (gluon.dal.Row): one row of the tablename table. tablename (str): name of a database table. fieldname (str): name of database field. keyname (str): key for the JSON type field. Returns: * ``row[tablename][fieldname]`` or ``row[fieldname]`` when tablename and fieldname are defined * ``row[tablename][fieldname][keyname]`` for JSON type field * ``kwargs[tablename]`` when fieldname and keyname are not defined * ``None`` when the field address does not exist in the row """ undefined = None # force value if tablename and (not fieldname) and (tablename in kwargs): return kwargs[tablename] # field is addressed in the row by tablename and by fieldname if tablename in row: value = row[tablename][fieldname] # field is addressed in the row by its fieldname only elif fieldname in row: value = row[fieldname] else: return undefined # deal with the keyname # it has been design for JSON-type field containing a dictionary if not keyname: return value elif keyname and keyname in value: return value[keyname] return undefined def split_dbfield(value): """Helper function to decode database field name as 3-elements tuple. The name of a database field is encoded as ``table.field`` or ``table.field.key``. The latter syntax is used for JSON type field. The function decodes as a 3-elements tuple (``tablename``, ``fieldname``, ``keyname``). Args: value (string): the name of the database field, either ``tablename.fieldname`` or ``tablename.fieldname.key`` Returns: tuple: ``(tablename, fieldname, keyname)`` where the ``keyname`` is either a string or an empty string. """ li = value.split(".") if len(li) == 1: li.extend(["", ""]) elif len(li) == 2: li.append("") return tuple(li) class ReportException(BaseException): pass class BaseReport(object): """Base class to build list, metric or graph reports. Args: table (gluon.dal.Table): table containing configurations for reports. id_report (int): identifier of the report in the table. """ def __init__(self, table, id_report): db = table._db self.db = db self.df = None self.rows = None # Extract the configuration for the report configuration config = table[id_report] # Extract the event identifier located in the condition field conditions = config.conditions mtch = REG_EVT_ID.search(conditions) if mtch is None: raise ReportException(current.T(MSG_NO_EVT_ID)) id_event = int(mtch.group(1)) # Instantiate the selector virtdb = current.globalenv["virtdb"] selector = EvtSelector(virtdb.selector, id_event) # apply the condition criteria used to filter the history records # condition can be written as a smart query: history.id_events == 7 # or like a python query: db.events.event == "People" # minimal protection to avoid injection flow # the beginning of the python query should be like: # db.table.field # (db.table.field # ((db.table.field # ( ( db.table.field # if REG_PYQUERY.match(conditions): q_conditions = eval(conditions, None, {"db": db}) else: q_conditions = smart_query(db.history, conditions) selector.append_query(q_conditions) # keep track of configuration and selector self.config = config self.selector = selector def _do_data(self, maps): """Build a temporarily list with the raw data for each series. This method handle the "year" database field. Args: maps (list): the database field map (tablename, fieldname, keyname). One per series. Returns: list: """ data = [] db = self.db selector = self.selector query = selector.query # limit the list of database fields to speed up processing: # - keep those required by the user # - remove virtual field # - add standard fields require to compute virtual fields dbfields = [db[el[0]][el[1]] for el in maps if el[1]] dbfields = [el for el in dbfields if not isinstance(el, FieldVirtual)] dbfields.extend([db.history.end_date, db.history.id_domains, db.history.id_people, db.history.id_teams, db.history.percentage, db.history.start_date, db.people.birth_date]) # the year axis is on # scan the database and compute virtual field on the year basis if self._is_year(maps): # get the year range for year in selector.get_years(): selector.set_year(year) for row in db(query(db.history)).select(*dbfields): values = [get_value(row, *elt, year=year) for elt in maps] data.append(values) # standard scan else: for row in db(query(db.history)).select(*dbfields): values = [get_value(row, *elt) for elt in maps] data.append(values) return data def _is_year(self, maps): """ Args: maps (list): the database field map (tablename, fieldname, keyname). One per series. Returns: bool: ``True`` is the pseudo field ``year`` is in maps """ li = [True for el in maps if el[0] == "year"] return (True if li else False) def to_df(self): """Return the pandas DataFrame. Returns: pandas.DataFrame: """ return self.df class Graph(BaseReport): """Build a report as a graph. Any data encapsulated in list, 1-dim or 2-dim metrics can be displayed as a graph. The rendering is performed by the matplotlib library. Therefore, many representations of the data are possible: plot, histogram, bar charts, error charts, scater plots, *etc*. Args: table (gluon.dal.Table): table containing configurations for reports. id_report (int): identifier of the report in the table. backend (str): the name of the matplotlib backend uses to produce figure. """ def __init__(self, table, id_report, backend="Agg"): self.db = table._db self.config = config = table[id_report] self.df = None self.rows = None # set the matplotlib back end # # NOTE: the X11 back end is not needed on the server side. In addition # Tkinter crash with the message "out of stack space" when the 2nd plot # is generated. # The documentation recommend to limit the matplotlib back end to Agg # which is tuned to render high quality PNG image. But, it is also # design to render PDF and SVG image without the X11 interface. # matplotlib.use(backend) # split the plot configuration in two parts: # 1) keywords for the DataFrame.plot method # 2) steering parameter for this class config.plot = json.loads(config.plot) config.steer = Storage() for k in ("index", "transpose", "xlabel", "ylabel"): v = config.plot.pop(k, None) config.steer[k] = v # instantiate the DataFrame for the report db = self.db report_type = config.report_type report_name = config.report_name report_id = get_id(db[report_type], name=report_name) if report_type == "lists": report = List(db.lists, report_id) elif report_type == "metrics1d": report = Metric1D(db.metrics1d, report_id) elif report_type == "metrics2d": report = Metric2D(db.metrics2d, report_id) self.df = report.to_df() self.selector = report.selector # build the graph from the DataFrame self._do_graph() self._do_labels() self._do_legend() self._do_tick() def _do_graph(self): """Build the graph from the ``DataFrame`` structure. """ config = self.config df = self.df plot, steer = config.plot, config.steer # transpose if steer.transpose: df = df.T # generate the plot using a specific set of columns if steer.index and len(steer.index) <= len(df.columns): ax = df.ix[:, steer.index].plot(**plot) # generate the plot using all columns else: ax = df.ix[:, :].plot(**plot) # persistence self.ax = ax def _do_labels(self): """Deal with axes label. """ ax = self.ax steer = self.config.steer if steer.xlabel: ax.set_xlabel(steer.xlabel, x=1, horizontalalignment="right") if steer.ylabel: ax.set_ylabel(steer.ylabel, y=1, horizontalalignment="right") def _do_legend(self): """Deal with legend. """ ax = self.ax if ax.get_legend(): box = ax.get_position() ax.set_position([box.x0, box.y0, box.width, box.height * 0.9]) ax.legend(loc="lower right", bbox_to_anchor=(1.01, 1.), fontsize=10, ncol=3) def _do_tick(self): """Polish the tick mark """ ax = self.ax ax.minorticks_on() ax.tick_params(which="major", length=8) ax.tick_params(which="minor", length=4) def _savefig(self, fmt): """Save the figure as a string. Args: fmt (str): possible values are pdf, png and svg. """ fig = self.ax.get_figure() fi = StringIO() fig.savefig(fi, format=fmt) data = fi.getvalue() fi.close() fig.clear() matplotlib.pyplot.close(fig) return data def to_pdf(self): """Encode the graph using the PDF format Returns: str: """ return self._savefig("pdf") def to_png(self): """Encode the graph using the PNG format. Returns: str: """ return self._savefig("png") def to_svg(self): """Encode the graph using the SVG format. Returns: str: """ return self._savefig("svg") class List(BaseReport): """Build a report as a list. A list is a table in which each column contains the values of one database field. The rows can be grouped per value of a given column. Summary information can be computed for each group as well as for the whole table. The list is displayed as the ``Dbui.grid.Panel`` widget. The configuration of the list columns is the configuration of the ``Dbui.grid.Panel`` object. More technically, this class interfaces the database and the ``Dbui.grid.Panel`` thought the underlying ``Ext.data.Store``. Its configuration is returned by the method *to_store*. Args: table (gluon.dal.Table): table containing configurations for reports. id_report (int): identifier of the report in the table. """ def __init__(self, table, id_report): BaseReport.__init__(self, table, id_report) # decode column configuration columns = [Storage(el) for el in json.loads(self.config.columns)] # check column configuration # add database field map (tablename, fieldname, keyname) # add the dataIndex (DataFrame, Ext.data.Store, Ext.grid.Panel) map(self._check_column, columns) # columns are persistent self._columns = columns # instantiate and fill the DataFrame self._do_metric() def _cast_type(self, column, dbfield, xtype): """Cast the type of a dataframe column to the database field type or to the grid column xtype. The type of the column determine by the pandas might be wrong. This append when events are merged with different user block. This is fine in most of the case but not with computed column. In that case the eval computation crashed. This method avoid this problem. It also convert properly datetime column allowing computation with them. Args: column (str): the index of the column in the DataFrame dbfield (tuple): address of the database field encoded as (tablename, fieldname, keyname). xtype (str): the xtype of the grid column. Possible values are ``booleancolumn``, ``datecolumn``, ``gridcolumn`` and ``numbercolumn``. """ df = self.df tablename, fieldname = dbfield[0:2] # the dtype of column containing a mixture of type is object. if (tablename == "year") or (df[column].dtype != "object"): return dbtype = self.db[tablename][fieldname].type # the dtype for column containing string is also object if dbtype in ("string", "text"): return elif dbtype == "boolean": df[column] = df[column].astype("bool") elif dbtype in ("date", "datetime", "time"): df[column] = to_datetime(df[column]) elif dbtype in ("double", "integer"): df[column] = df[column].astype("float64") # database field containing JSON-type dictionary # The type of the key is defined in the event model but it is # not accessible at this stage. Instead we use the grid column xtype. elif dbtype == "json": if xtype == "gridcolumn": pass elif xtype == "booleancolumn": df[column] = df[column].astype("bool") elif xtype == "datecolumn": df[column] = to_datetime(df[column]) elif xtype == "numbercolumn": df[column] = df[column].astype("float64") def _check_column(self, column): """Check column configuration: - Raise an exception if xtype is not defined - Raise an exception when eval is defined but not the dataIndex - Add the database field map - Add the dataIndex if not defined Args: column (gluon.storage.Storage): """ T = current.T xtype = column.xtype if not xtype: raise ReportException(T(MSG_NO_XTYPE)) if column.eval and not column.dataIndex: raise ReportException(T(MSG_NO_DATAINDEX)) dbfield = column.dbfield if dbfield: column.map = split_dbfield(dbfield) if not (column.dataIndex or xtype == "rownumberer"): column.dataIndex = column.dbfield.replace(".", "") def _do_metric(self): """Interface the database with the DataFrame structure. This method handle the ``year`` database field. """ columns = self._columns # extract columns associated to database fields maps, index, xtypes = [], [], [] for column in columns: if column.dbfield: maps.append(column.map) index.append(column.dataIndex) xtypes.append(column.xtype) # extract data from the database data = self._do_data(maps) # protection if not data: self.df = DataFrame(columns=index) return # fill the DataFrame df = DataFrame(data, columns=index) # make the data frame persistent self.df = df # cast dataframe column type to database type or grid column xtype map(self._cast_type, index, maps, xtypes) # add computed columns for el in columns: if el.eval: df[el.dataIndex] = df.eval(el.eval) # re-order the column to follow user requirement # skip rownumberer column (None index) index = [el.dataIndex for el in columns if el.dataIndex] df = df[index] def _set_store_data(self): """Generate the ``Ext.data.Store.data`` property. It is a list of dictionaries. Each of them contains the data for one row. One key, value pair for each ``Ext.data.Field`` where the key is the name of the ``Ext.data.Field``. """ # extract the list of records as a JSON-string # at this stage date/time are converted as an ISO8601 string data = self.df.to_json(orient="records", date_format="iso") # convert the JSON-string into a list self._store.data = json.loads(data) def _set_store_fields(self): """Generate the ``Ext.data.Store.fields`` property. It is a list of ``Ext.data.Field`` configuration. Note: The name of the ``Ext.data.Field`` is derived from the address of the database field. The former can not contains dot. Therefore, it is obtained by removing dot in the database field address. """ db = self.db columns = self._columns store = self._store # convert the columns into the configuration of an Ext.data.Field for el in columns: # protection against rownumberer column if not el.dataIndex: continue tablename, fieldname, keyname = el.map cfg = Storage(name=el.dataIndex) # the pseudo field year if el.dbfield == "year": cfg.type = "int" # the computed column elif el.eval: cfg.type = "float" # json type database field elif keyname: xtype = el.xtype if xtype == "gridcolumn": cfg.type = "string" elif xtype == "booleancolumn": cfg.type = "boolean" elif xtype == "datecolumn": cfg.type = "date" elif xtype == "numbercolumn": cfg.type = "float" # standard database field, extract the type from the database field else: dbfield = db[tablename][fieldname] cfg.type = dbfield.type if dbfield.type in ("blob", "string", "text", "json"): cfg.type = "string" elif dbfield.type == "boolean": cfg.type = "boolean" elif dbfield.type in ("date", "datetime", "time"): cfg.type = "date" cfg.dateFormat = "c" elif dbfield.type == "double": cfg.type = "float" elif dbfield.type == "integer": cfg.type = "int" store.fields.append(cfg) def to_grid(self): """Build the configuration for the ``Dbui.grid.Panel``. Returns: gluon.storage.Storage: the keys are ``columns`` and ``features`` and the corresponding values are list of dictionary. """ config = self.config grid = Storage(columns=[], features=[]) # column from the configuration # remove non Ext JS property for cfg in self._columns: for key in ("dbfield", "eval", "map"): if key in cfg: del cfg[key] grid.columns.append(cfg) # features from the configuration grid.features = json.loads(config.features) return grid def to_store(self): """Build the configuration for the ``Ext.data.Store``. Note: The name of the ``Ext.data.Field`` is extract from the column definition of the list where it is equal to ``table.field`` or ``table.field.key``. The dot is remove in the ``Ext.data.Field`` name. Therefore the ``dataIndex`` used in the grid column configuration has to be modified accordingly. Returns: plugin_dbui.Store: the configuration of the ``Ext.data.Store``. """ config = self.config store = Store(data=[], fields=[]) store.groupField = config.group_field.replace(".", "") store.sorters = [] for el in config.sorters: sorter = dict(property=el.replace(".", "").replace("~", "")) if el.startswith("~"): sorter["direction"] = "DESC" store.sorters.append(sorter) self._store = store self._set_store_fields() self._set_store_data() return self._store class Metric1D(List): """Build a report as a 1-dim metric. A Metric1D is a table displaying metrics when records are group by value for a given database field. for example, the ``cell[i]`` of the ``table.field2`` column contains the sum of the ``table.field2`` when ``table.field1 = group_value[i]``. Each column is associated to a database field. The first one shows the value of the group while the others correspond to the database fields on which the aggregation functions are applied. Many aggregation functions are available: ``count``, ``max``, ``mean``, ``median``, ``min``, ``size``, ``std``, ``sum``, *etc*. In fact, all the computation methods of the ``pandas.DataFrame`` class. A metric is defined by a database field and an aggregation function. several metrics can be computed as the same time applying different aggregation function possibly on different field. A summary information can also be computed for each column or rows. Args: table (gluon.dal.Table): table containing configurations for reports. id_report (int): identifier of the report in the table. """ def __init__(self, table, id_report): BaseReport.__init__(self, table, id_report) config = self.config # group by attributes field_groupby = config.group_field if field_groupby == "year": index_groupby = "year" text_groupby = "year" else: tu = split_dbfield(field_groupby) index_groupby = field_groupby.replace(".", "") text_groupby = (tu[2] if tu[2] else tu[1]) # first column contains group by information first_column = Storage(aggregate="", dbfield=field_groupby, text=text_groupby, xtype="gridcolumn") # columns configuration columns = [first_column] columns.extend([Storage(el) for el in json.loads(config.columns)]) # check column configuration # add database field map (tablename, fieldname, keyname) # add the dataIndex (DataFrame, Ext.data.Store, Ext.grid.Panel) map(self._check_column, columns) # persistence self._columns = columns self._field_groupby = field_groupby self._index_groupby = index_groupby self._do_metric() def _do_metric(self): """Compute the metric according to user specifications. """ columns = self._columns index_groupby = self._index_groupby # extract columns associated to database field # and their aggregation function indexes, maps, operators, xtypes = [], [], {}, [] for column in columns: if column.dbfield: dataIndex = column.dataIndex indexes.append(dataIndex) maps.append(column.map) aggregate = column.aggregate if aggregate: operators[dataIndex] = aggregate xtypes.append(column.xtype) # interrogate the database and fill the data frame data = self._do_data(maps) # protection against empty data if not data: indexes = (el.dataIndex for el in columns) self.df = DataFrame(columns=indexes) return df = DataFrame(data, columns=indexes) # cast dataframe column type to database field or grid column types self.df = df map(self._cast_type, indexes, maps, xtypes) # aggregate the data running dedicated operator for each column df = df.groupby(index_groupby) df = df.agg(operators) # computed column for el in columns: if el.eval: df[el.dataIndex] = df.eval(el.eval) # re-order the column to follow user configuration # exclude the first column containing the group_by data index = [el.dataIndex for el in columns[1:]] df = df[index] # transform year MultiIndex into Index (only for year pseudo field) if isinstance(df.index, MultiIndex): df.index = [el[1] for el in df.index] # make the data frame persistent self.df = df def _is_year(self, maps): """Supersede the method of the base class. Returns: bool: true if the group_field is year. """ return self.config.group_field == "year" def _set_store_data(self): """Generate the ``Ext.data.Store.data`` property. It is a list of dictionaries. Each of them contains the data for one row. One key, value pair for each ``Ext.data.Field``. The key is equal to the ``Ext.data.Field`` name. """ df = self.df data = df.to_dict(orient="records") # add the groupby value to each row index_groupby = self._index_groupby map(lambda row, v: row.__setitem__(index_groupby, v), data, df.index) self._store.data.extend(data) def _set_store_fields(self): """ Generate the Ext.data.Store.fields property. The store contains four fields: group, count, sum_fte and avg_age and data are grouped per the group field values. """ columns = self._columns index_groupby = self._index_groupby store = self._store for column in columns: cfg = Storage(name=column.dataIndex) xtype = column.xtype if xtype == "numbercolumn": cfg.type = "float" elif xtype == "booleancolumn": cfg.type = "boolean" elif xtype == "datecolumn": cfg.type = "date" elif xtype == "gridcolumn": cfg.type = "string" store.fields.append(cfg) store.sorters = [index_groupby] def to_grid(self): """Build the configuration for the ``Dbui.grid.Panel``. Returns: plugin_dbui.Grid: the configuration of the ``Dbui.grid.Panel``. The columns and features property are JSON encoded. """ columns = self._columns grid = Storage(columns=[], features=[]) # grid column configuration for cfg in columns: # remove non Ext JS property for key in ("aggregate", "dbfield", "eval", "map"): if key in cfg: del cfg[key] grid.columns.append(cfg) # activate summary feature grid.features = [{"ftype": "summary"}] return grid def to_store(self): """Build the configuration for the ``Ext.data.Store``. Returns: plugin_dbui.Store: the configuration of the ``Ext.data.Store``. """ self._store = Store(data=[], fields=[]) self._set_store_fields() self._set_store_data() return self._store class Metric2D(BaseReport): """Build a report as a 2-dim metric. A Metric2D is a table displaying a single metric for two database fields one against the other. For both database field the values are grouped. for example, the ``cell[i,j]`` contains the sum of the ``table.field3`` when ``table.field1 = group_value1[i]`` and ``table.field2 = group_value2[j]``. Many aggregation function are available: ``count``, ``max``, ``mean``, ``median``, ``min``, ``size``, ``std``, ``sum``, *etc*. In fact, all the computation methods of the ``pandas.DataFrame`` class. Args: table (gluon.dal.Table): table containing configurations for reports. id_report (int): identifier of the report in the table. """ def __init__(self, table, id_report): BaseReport.__init__(self, table, id_report) self._do_metric() # replace undefined value by 0 self.df = self.df.fillna(0) def _do_metric(self): """Compute the metric involving two database field. """ config = self.config # database field addresses # # the year can be on the horizontal or vertical axis # rotate to have year always along the horizontal axis # reverse operation will be performed at the end address_x = config.group_field_x address_y = config.group_field_y address_z = config.metric_field_z if address_y == "year": address_y, address_x = address_x, "year" addresses = [address_x, address_y, address_z] # the database field encoded as (tablename, fieldname, keyname) dbfield_x = split_dbfield(address_x) dbfield_y = split_dbfield(address_y) dbfield_z = split_dbfield(address_z) dbfields = [dbfield_x, dbfield_y, dbfield_z] # metric aggregate = config.aggregation_z metric = {} metric[address_z] = aggregate # database field expression along the z axis expression = None if not REG_SINGLE_DBFIELD.match(address_z): expression = address_z ref_addresses = list(addresses) del dbfields[2] del addresses[2] i = 0 for m in REG_DBFIELD.finditer(address_z): address = m.group() dbfield = split_dbfield(address) tmp = "a%i" % i i += 1 addresses.append(tmp) dbfields.append(dbfield) expression = expression.replace(address, tmp) # build the data frame data = self._do_data(dbfields) if not data: self.df = DataFrame(columns=addresses) return df = DataFrame(data, columns=addresses) # compute the expression and remove temporarely column if expression: df[address_z] = df.eval(expression) df = df[ref_addresses] # remove duplicate entries if aggregate in ("count", "size"): df = df.drop_duplicates() # group the data df = df.groupby([address_x, address_y]) df = df.agg(metric) # move to a multi index structure to a 2D table df = df.unstack(level=0) # rotate the data frame when year is along the y-axis if config.group_field_y == "year": df = df.T # transform year MultiIndex into Index if isinstance(df.columns, MultiIndex): df.columns = [el[1] for el in df.columns] if isinstance(df.index, MultiIndex): df.index = [el[1] for el in df.index] # data frame is persistent self.df = df def _set_store_data(self): """Generate the ``Ext.data.Store.data`` property. """ df = self.df name = self.config.group_field_y.replace(".", "") # idr is the index of the row either a tuple or a string # row is a dictionary containing row values for idr, row in df.T.to_dict().iteritems(): di = {} di[name] = (idr[1] if isinstance(idr, tuple) else idr) # idc is the index of the column either a tuple or a string for idc, v in row.iteritems(): k = (idc[1] if isinstance(idc, tuple) else idc) di[k] = v self._store.data.append(di) def _set_store_fields(self): """Generate the ``Ext.data.Store.fields`` property. """ store = self._store # first column is for the grouped values of the vertical field index_group_y = self.config.group_field_y.replace(".", "") di = dict(name=index_group_y, type="string") store.fields.append(di) # one column per grouped value of the horizontal field # the column identifier is a tuple (adress_z, group_x) for index in self._columns: di = dict(name=index, type="number") store.fields.append(di) store.sorters = [index_group_y] def to_grid(self): """Build the configuration for the ``Dbui.grid.Panel``. Returns: plugin_dbui.Grid: the configuration of the ``Dbui.grid.Panel``. The columns and features properties are JSON encoded. """ config = self.config address_y = config.group_field_y map_y = split_dbfield(address_y) text_y = (map_y[2] if map_y[2] else map_y[1]) grid = Storage(columns=[], features=[]) grid.columns.append({"text": current.T(text_y.title()), "dataIndex": address_y.replace(".", ""), "flex": 0.8}) for name in self._columns: grid.columns.append({"text": str(name), "dataIndex": str(name), "align": "right", "flex": 0.5, "format": "0.00", "summaryType": "sum", "xtype": "numbercolumn"}) grid.features.append({"ftype": "summary"}) return grid def to_store(self): """Build the configuration for the ``Ext.data.Store``. Returns: plugin_dbui.Store: the configuration of the ``Ext.data.Store``. """ # get the list of columns df = self.df # extract column label from the column index # the latter can be an Index of a MultIndex idx = df.columns idx = \ (idx.get_level_values(-1) if isinstance(idx, MultiIndex) else idx) self._columns = idx.tolist() # build the store configuration self._store = Store(data=[], fields=[]) self._set_store_fields() self._set_store_data() return self._store