
Commit 8df273b9 authored by LE GAC Renaud

Merge branch '94-python37' into 'master'

Resolve "migrate to python 3.7"

Closes #94

See merge request !97
parents 52483d05 f0b197fe
--------------------------------- CHANGELOG ----------------------------------
HEAD
- Migrate to Python 3.7.
0.9.7.1 (Oct 2019)
- Minor release.
......
# -*- coding: utf-8 -*-
""" Controllers for building graphs using pandas library
"""
import base64
import matplotlib
matplotlib.use("Agg")
import matplotlib.pyplot as plt
import io
from matplotlib.figure import Figure
from gluon import current
from gluon.storage import Storage
from graph_tools import (FROM_TO,
......@@ -18,9 +17,6 @@ from graph_tools import (FROM_TO,
from model_selector import YEAR_SUBMIT
mplstyle()
def dashboard():
"""Return a pre-configure linechart for public used.
Cumulative distribution for the publications are shown for
......@@ -41,8 +37,10 @@ def dashboard():
selector.Graph_selectorYear_start = ""
selector.Graph_selectorYear_end = ""
# figure layout
fig, (ax1, ax2) = plt.subplots(nrows=1, ncols=2, sharey=True)
# figure layout and style
mplstyle()
fig = Figure()
ax1, ax2 = fig.subplots(nrows=1, ncols=2, sharey=True)
# the number of publications per year except for the current year
# the starting value is defined in the preferences of the application
......@@ -54,8 +52,8 @@ def dashboard():
selector.Graph_selectorYear_start = min_year
selector.Graph_selectorYear_end = max_year
title = (T(FROM_TO) % (min_year, max_year)).decode("utf-8")
ylabel = T(LABELY_YEAR).decode("utf-8")
title = T(FROM_TO) % (min_year, max_year)
ylabel = T(LABELY_YEAR)
linechart(db,
selector,
......@@ -71,12 +69,16 @@ def dashboard():
selector.Graph_selectorYear_start = year
selector.Graph_selectorYear_end = ""
title = (T("In %s") % year).decode("utf-8")
title = T("In %s") % year
linechart(db, selector, target=ax2, title=title)
# delegate the rendering to the view
buf = io.BytesIO()
fig.savefig(buf, format="svg")
data = base64.b64encode(buf.getbuffer()).decode("ascii")
response.view = "graphs/index.html"
return dict(data=savefig(fig, "svg"))
return dict(data=data)
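For context, the rewritten controller follows matplotlib's object-oriented embedding pattern: build a Figure directly instead of going through the pyplot state machine (safer in a multi-threaded web server), render it into an in-memory buffer, and base64-encode the bytes so the view can inline the image. A minimal, self-contained sketch of that pattern with invented plot data (only the Figure/savefig/b64encode calls mirror the code above):

import base64
import io

from matplotlib.backends.backend_agg import FigureCanvasAgg
from matplotlib.figure import Figure

fig = Figure()
FigureCanvasAgg(fig)  # attach a canvas explicitly; matplotlib >= 3.1 does this lazily
ax1, ax2 = fig.subplots(nrows=1, ncols=2, sharey=True)
ax1.plot([2016, 2017, 2018], [10, 25, 42])  # illustrative data, not from the database
ax2.plot([1, 6, 12], [3, 8, 15])

buf = io.BytesIO()
fig.savefig(buf, format="svg")  # write the SVG into the buffer
data = base64.b64encode(buf.getbuffer()).decode("ascii")
# "data" is the base64 string handed to the graphs/index.html view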
def publications_versus_time():
......@@ -89,17 +91,24 @@ def publications_versus_time():
graph = db.graphs[selector.Graph_selectorId_graphs]
axis = graph.stack_axis
# instantiate the graph
ax = (stackchart(db, selector) if axis else linechart(db, selector))
# figure and style
mplstyle()
fig = Figure()
ax = fig.subplots(nrows=1, ncols=1)
# create the plot
if axis:
stackchart(db, selector, target=ax)
else:
linechart(db, selector, target=ax)
# delegate the rendering to the view
extension = request.extension
fmt = "svg" if extension == "html" else extension
fmt = ("svg" if extension == "html" else extension)
# base64 string encoding
data = savefig(ax.get_figure(), fmt)
if fmt in ("pdf", "png"):
data = base64.b64encode(data)
buf = io.BytesIO()
fig.savefig(buf, format=fmt)
data = base64.b64encode(buf.getbuffer()).decode("ascii")
response.view = "graphs/index.%s" % extension
return dict(data=data)
......@@ -168,7 +168,7 @@ def edit_insert():
fauthor = record.first_author()
if isinstance(fauthor, list):
fauthor = u", ".join(fauthor)
fauthor = ", ".join(fauthor)
values["PublicationsFirst_author"] = fauthor
values["PublicationsAuthors"] = record.authors()
......@@ -281,7 +281,7 @@ def edit_insert():
values["PublicationsYear"] = year
except Exception:
except Exception as e:
# log the exception in the web2py ticket system
ticket = RestrictedError(layer="harvester.py",
......@@ -291,7 +291,7 @@ def edit_insert():
ticket.log(request)
# inform the user that something went wrong on the server
raise HTTP(500)
raise HTTP(500, T(str(e)))
return dict(cfg=cfg, values=values)
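Because the hunk above is split, here is a condensed sketch of the whole error path; it is not a reconstruction of the elided lines. It assumes web2py's ticketing API (gluon.restricted.RestrictedError, whose log(request) method records a ticket); the code and output arguments are assumptions:

try:
    values["PublicationsYear"] = year
except Exception as e:
    # record a ticket for the administrator in the web2py ticket system
    ticket = RestrictedError(layer="harvester.py",  # layer value appears in the source
                             code="edit_insert",    # assumed identifier
                             output=str(e))         # assumed payload
    ticket.log(request)
    # surface the (translated) error message to the user
    raise HTTP(500, T(str(e)))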
......
......@@ -45,7 +45,7 @@ def update():
# operation can be rejected by the database
except Exception as dbe:
return json.dumps(dict(success=False, msg=dbe.message))
return json.dumps(dict(success=False, msg=str(dbe)))
# operation can be rejected by the table._before_update callbacks
if not rep:
......
......@@ -51,7 +51,7 @@ def affiliation_institute():
# extract keys defining the affiliation
# u and v are the main keys used in inspirehep and cds
# b is used by some notes in Atlas
keys = (record[u"110"][k] for k in ("u", "t", "b") if k in record[u"110"])
keys = (record["110"][k] for k in ("u", "t", "b") if k in record["110"])
keys = (dict(key_u=key, key_v="") for key in keys)
# check that the rule does not exist
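To illustrate the two generator expressions above, here is how a hypothetical MARC-like record (invented values, no "b" subfield) is collapsed into candidate affiliation keys:

record = {"110": {"u": "Aix-Marseille Univ, CNRS/IN2P3, CPPM, Marseille, France",
                  "t": "CPPM"}}  # hypothetical record content

keys = (record["110"][k] for k in ("u", "t", "b") if k in record["110"])
keys = [dict(key_u=key, key_v="") for key in keys]
# keys == [{"key_u": "Aix-Marseille Univ, CNRS/IN2P3, CPPM, Marseille, France", "key_v": ""},
#          {"key_u": "CPPM", "key_v": ""}]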
......@@ -99,8 +99,8 @@ def affiliation_publication():
reg = re.compile(pattern, re.IGNORECASE)
key = None
if u"700" in record and isinstance(record[u"700"], list):
for di in record[u"700"]:
if "700" in record and isinstance(record["700"], list):
for di in record["700"]:
author = di["a"]
if reg.match(author):
......@@ -404,7 +404,7 @@ def harvester():
# operation is rejected by the database
except Exception as dbe:
raise HTTP(500, dbe.message)
raise HTTP(500, str(dbe))
# operation is rejected by the callback
# NOTE in the else branch to avoid recursive exception generation
......
......@@ -271,7 +271,7 @@ latex_elements = {
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc,
'limbra.tex',
'limbra_user.tex',
u'LIMBRA Guide utilisateur',
u'D. Cristofol, R. Le Gac et E. Vernay',
'manual'),
......
......@@ -548,6 +548,7 @@
'Reject no conference date': 'Rejeté pas de dates pour la conférence',
'Reject no conference information': "Rejeté pas d'information sur la conférence",
'Reject no CPPM authors': "Rejeté pas d'auteurs du CPPM",
'Reject no host information in record': "Rejeté l'entrepôt n'est pas défini dans la notice",
'Reject no OAI identifier': "Rejeté pas d'identifiant OAI",
'Reject no preprint number': 'Rejeté pas de numéro de preprint',
'Reject no preprint number nor submission date': 'Rejeté pas de numéro de preprint ou de date de soumission',
......
# -*- coding: utf-8 -*-
""" main
Instantiate the database connection, model the database tables and configure the
......@@ -21,6 +20,7 @@ from _mydb import DBURIS
from auth import configure_auth, USER
from gluon import current
from gluon.tools import PluginManager
from model_app import App
from model_core import Core
from model_harvester import Harvester
......@@ -80,16 +80,26 @@ T.lazy = True # immediate translation
#
# Configure plugin_dbui
#
plugins = PluginManager()
plugins.dbui.pname = "plugin_dbui"
ctrl = request.controller
fnct = request.function
if ctrl == "plugin_dbui" and fnct in ("about", "index"):
if ctrl == "plugin_dbui" and fnct in ("about", "index", "documentations_table"):
Dbui.define_paths(
app_css="static/my.css",
app_lg="static/limbra/locale/limbra-lang-fr.js",
app_libmin="static/limbra-min.js",
app_script="static/app.js")
app_script="static/app.js",
app_db_schema="static/docs/database.png",
app_html_api="static/docs/api/index.html",
app_html_jsduck="static/docs/jsduck/index.html",
app_html_user="static/docs/user/index.html",
app_pdf_api="static/docs/pdf/limbra_api.pdf",
app_pdf_user="static/docs/pdf/limbra_user.pdf")
# ............................................................................
#
......
# -*- coding: utf-8 -*-
""" auth
* Customise the authentication
......
# -*- coding: utf-8 -*-
"""a collections of functions to be used in _before_delete, before_insert
and _before_update callbacks.
"""
from check_tools import (duplicate_article,
duplicate_conference,
duplicate_report)
from .check_tools import (duplicate_article,
duplicate_conference,
duplicate_report)
from gluon import current
from plugin_dbui import (CALLBACK_ERRORS,
get_id,
......@@ -26,14 +25,16 @@ def INHIBIT_CASCADE_DELETE(set_records):
bool: ``True`` when the delete is inhibited
"""
db, T = current.db, current.T
field = set_records.query.first
setquery = set_records.query
# protection
# the query of the set should be "table.id == 45"
if field._db._adapter.EQ != set_records.query.op:
if setquery.op.__name__ != "eq":
return False
db = current.db
field = setquery.first
# protection
# check that the table is one of the publication reference tables
tables = (db.authors_roles,
......@@ -51,10 +52,10 @@ def INHIBIT_CASCADE_DELETE(set_records):
# inhibit the delete if publications use the reference field
query = get_where_query(db.publications)
query = (query) & (set_records.query)
query = (query) & (setquery)
if db(query).count():
field._table[CALLBACK_ERRORS] = T(MSG_DUPLICATE)
field._table[CALLBACK_ERRORS] = current.T(MSG_DUPLICATE)
return True
return False
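For context, this guard is meant to run as a web2py DAL callback: _before_delete callbacks receive the Set being deleted and abort the operation when they return True. A minimal wiring sketch (db.authors_roles appears in the tuple above; the other reference tables would be attached the same way):

# register the guard on a publication reference table
db.authors_roles._before_delete.append(INHIBIT_CASCADE_DELETE)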
......
# -*- coding: utf-8 -*-
"""a collection of tools to check rows.
"""
import re
import regex
from . import regex
from gluon import current
from plugin_dbui import (UNDEF,
UNDEF_ID,
......
# -*- coding: utf-8 -*-
""" countries
List of countries extracted from the geographical database www.geonames.org:
......
# -*- coding: utf-8 -*-
"""a collections of functions to correct entries in the database.
"""
......@@ -13,10 +12,10 @@ def CLEAN_COLLABORATION(value):
* Collaborations always start with a capital letter.
Args:
value (unicode): string where collaborations are separated by comma
value (str): string where collaborations are separated by comma
Returns:
unicode:
str:
"""
li = []
......@@ -84,10 +83,10 @@ def CLEAN_THESIS_DEFENSE(value):
* Remove prefixes like *Presented*, *on*, *etc.*
Args:
value (unicode): string with the defence date
value (str): string with the defence date
Returns:
unicode:
str:
"""
value = value.replace("Presented ", "")
......
# -*- coding: utf-8 -*-
"""A collection of tools to build graphs in controllers.
"""
import matplotlib as mpl
import pandas as pd
import tempfile
from gluon import current
from model_selector import MONTH_SUBMIT, YEAR_PUBLISH
from io import StringIO
from .model_selector import MONTH_SUBMIT, YEAR_PUBLISH
from pandas import DataFrame, DatetimeIndex, to_datetime
from StringIO import StringIO
DATE_PUB = "Publication date"
......@@ -133,10 +133,10 @@ def emptychart(db, selector, target=None):
ax.tick_params(which="minor", length=4)
# x and y labels
xlabel = T(DATE_SUB).decode("utf-8")
xlabel = T(DATE_SUB)
ax.set_xlabel(xlabel, x=1, horizontalalignment="right")
ylabel = T(LABELY).decode("utf-8")
ylabel = T(LABELY)
ax.set_ylabel(ylabel, y=1, horizontalalignment="right")
# main title of the graph
......@@ -257,7 +257,7 @@ def linechart(db, selector, target=None, title=None, xlabel=None, ylabel=None):
# x label
if xlabel is None:
xlabel = T(DATE_PUB if is_publish_year else DATE_SUB).decode("utf-8")
xlabel = T(DATE_PUB if is_publish_year else DATE_SUB)
ax.set_xlabel(xlabel, x=1, horizontalalignment="right")
......@@ -274,7 +274,7 @@ def linechart(db, selector, target=None, title=None, xlabel=None, ylabel=None):
else:
ylabel = LABELY_YEAR
ylabel = T(ylabel).decode("utf-8")
ylabel = T(ylabel)
ax.set_ylabel(ylabel, y=1, horizontalalignment="right")
# main title of the graph
......@@ -345,7 +345,7 @@ def query_publications(db, is_publish, **kwargs):
elif year_end and not year_start:
query = publications.submitted[0:4] <= year_end
for k, v in di.iteritems():
for k, v in di.items():
if k in ("Graph_selectorYear_start", "Graph_selectorYear_end"):
continue
......@@ -388,10 +388,19 @@ def savefig(fig, fmt):
str:
"""
fi = StringIO()
fig.savefig(fi, format=fmt)
data = fi.getvalue()
fi.close()
if fmt == "svg":
fi = StringIO()
fig.savefig(fi, format=fmt)
data = fi.getvalue()
fi.close()
else:
fi = tempfile.TemporaryFile()
fig.savefig(fi, format=fmt)
fi.seek(0)
data = fi.read()
fi.close()
fig.clear()
mpl.pyplot.close(fig)
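Under Python 3 the svg/tempfile split above could also be collapsed: a single io.BytesIO buffer accepts both the text-oriented (svg) and binary (pdf, png) matplotlib backends, which is the approach the dashboard controller already takes. A hypothetical simplified helper; the name savefig_b64 is not in the source:

import base64
import io

def savefig_b64(fig, fmt):
    """Render fig in format fmt and return a base64-encoded ascii string."""
    buf = io.BytesIO()
    fig.savefig(buf, format=fmt)  # svg, pdf and png all write into the bytes buffer
    return base64.b64encode(buf.getbuffer()).decode("ascii")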
......@@ -441,7 +450,7 @@ def set_title(ax, db, selector):
period = year_start
title = "%s %s" % (title, period)
ax.set_title(title.decode("utf-8"))
ax.set_title(title)
def stackchart(db, selector, target=None):
......@@ -539,7 +548,7 @@ def stackchart(db, selector, target=None):
ax.tick_params(which="minor", length=4)
# x and y labels
xlabel = T(DATE_PUB if is_publish_year else DATE_SUB).decode("utf-8")
xlabel = T(DATE_PUB if is_publish_year else DATE_SUB)
ax.set_xlabel(xlabel, x=1, horizontalalignment="right")
# y label
......@@ -554,14 +563,14 @@ def stackchart(db, selector, target=None):
else:
ylabel = LABELY_YEAR
ylabel = T(ylabel).decode("utf-8")
ylabel = T(ylabel)
ax.set_ylabel(ylabel, y=1, horizontalalignment="right")
# legend title is the name of the axis
axis = (axis if axis == "categories" else graph.stack_granularity)
legend = ax.get_legend()
legend.set_title(T(axis).decode("utf-8"))
legend.set_title(T(axis))
# main title of the graph
set_title(ax, db, selector)
......
# -*- coding: utf-8 -*-
"""a collection of tools to search of publications in invenio store
and to push them in the database.
"""
from base import (DRY_RUN,
MSG_CRASH,
MSG_FIX_ORIGIN,
MSG_IN_DB,
MSG_LOAD,
MSG_NO_ENTRY,
MSG_TOOMANY_SYNONYM,
family_name_fr,
search_synonym)
from .base import (DRY_RUN,
MSG_CRASH,
MSG_FIX_ORIGIN,
MSG_IN_DB,
MSG_LOAD,
MSG_NO_ENTRY,
MSG_TOOMANY_SYNONYM,
family_name_fr,
search_synonym)
from automaton import Automaton
from articles import Articles
from checkandfix import CheckAndFix, MONTHS
from exception import CheckException, ToolException
from factory import build_harvester_tool, get_harvester_tool
from msg import Msg
from msgcollection import MsgCollection
from notes import Notes
from preprints import Preprints
from proceedings import Proceedings
from reports import Reports
from talks import Talks
from thesis import Thesis
from .automaton import Automaton
from .articles import Articles
from .checkandfix import CheckAndFix, MONTHS
from .exception import CheckException, ToolException
from .factory import build_harvester_tool, get_harvester_tool
from .msg import Msg
from .msgcollection import MsgCollection
from .notes import Notes
from .preprints import Preprints
from .proceedings import Proceedings
from .reports import Reports
from .talks import Talks
from .thesis import Thesis
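These import rewrites reflect a Python 3 rule: implicit relative imports were removed, so intra-package imports must either carry an explicit leading dot or spell out the full package path. A short illustration using module names from the harvest_tools package above:

# Python 2 only (implicitly relative to the harvest_tools package):
#     from base import MSG_CRASH
# Python 3, explicit relative import:
from .base import MSG_CRASH
# or the equivalent absolute form:
# from harvest_tools.base import MSG_CRASH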
# -*- coding: utf-8 -*-
""" harvest_tools.articles
"""
import traceback
from automaton import Automaton
from base import (learn_my_authors,
MSG_CRASH,
MSG_FIX_ORIGIN,
MSG_IN_DB,
MSG_LOAD)
from checkandfix import CheckException
from .automaton import Automaton
from .base import (learn_my_authors,
MSG_CRASH,
MSG_FIX_ORIGIN,
MSG_IN_DB,
MSG_LOAD)
from .checkandfix import CheckException
from plugin_dbui import get_id, UNDEF_ID
......@@ -45,7 +44,7 @@ class Articles(Automaton):
return False
if self.dbg:
print "check article record"
print("check article record")
try:
self.check.clean_erratum(record)
......@@ -70,7 +69,7 @@ class Articles(Automaton):
except Exception as e:
self.logs[-1].reject(MSG_CRASH % e, record=record, translate=False)
print traceback.format_exc()
print((traceback.format_exc()))
return False
return True
......@@ -93,22 +92,22 @@ class Articles(Automaton):
found later by the harvester.
Args:
oai_url (unicode): the oai_url, *e.g*
oai_url (str): the oai_url, *e.g*
``http://cds.cern.ch/record/123456``. The origin field
of the existing database record is updated to **oai_url**
when a match is found.
year (unicode): the year of the publication. It is used
year (str): the year of the publication. It is used
by the search algorithm and by the logger.
Keyword Args:
id_publisher (int): identifier of the publisher in the database.
my_authors (unicode): authors of my institute separated by a comma.
pages (unicode): the page reference.
publication_url (unicode): the URL of the publications
preprint_number (unicode): the preprint number
title (unicode): the title of the publication.
volume (unicode): the volume reference.
my_authors (str): authors of my institute separated by a comma.
pages (str): the page reference.
publication_url (str): the URL of the publications
preprint_number (str): the preprint number
title (str): the title of the publication.
volume (str): the volume reference.
Returns:
tuple: ``(id, status)`` which contains the ``id`` of the record.
......@@ -118,7 +117,7 @@ class Articles(Automaton):
"""
if self.dbg:
print "get existing article by fields"
print("get existing article by fields")
# alias
db = self.db
......@@ -195,20 +194,20 @@ class Articles(Automaton):
All the keyword arguments are needed by the transformation.
Args:
primary_oai_url (unicode): the *primary* OAI identifier of the
primary_oai_url (str): the *primary* OAI identifier of the
record. It is used by the search algorithm.
year (unicode): the year of publication which is used
year (str): the year of publication which is used
by the logger.
Keyword Args:
id_publisher (int): identifier of the publisher in the database.
my_authors (unicode): authors of my institute separated by a comma.
oai_url (unicode): the full oai_url(s) of the article.
pages (unicode): the page reference.
publication_url (unicode): the URL of the publications
title (unicode): the title of the publication.
volume (unicode): the volume reference.
my_authors (str): authors of my institute separated by a comma.
oai_url (str): the full oai_url(s) of the article.
pages (str): the page reference.
publication_url (str): the URL of the publications
title (str): the title of the publication.
volume (str): the volume reference.
Returns:
tuple: ``(id, status)`` which contains the ``id`` of the record.
......@@ -218,7 +217,7 @@ class Articles(Automaton):
"""
if self.dbg:
print "check existing article by origin"
print("check existing article by origin")
# alias
db = self.db
......
# -*- coding: utf-8 -*-
""" harvest_tools.automaton
"""
......@@ -6,17 +5,17 @@ import re
import traceback