From 4cdc31ba1ad251d2faf0ae7c674d8d3b565b4aa0 Mon Sep 17 00:00:00 2001 From: Tiberiu Ichim Date: Fri, 9 Aug 2024 08:06:13 +0300 Subject: [PATCH] Broken links (#159) * WIP on links * Fix for bs links * Volto compatibility * Cleanup * Cleanup * WIP * WIP --- eea/climateadapt/broken_links.py | 452 ++++++ eea/climateadapt/broken_links.zcml | 32 + eea/climateadapt/browser/configure.zcml | 1667 +++++++++++------------ eea/climateadapt/browser/misc.py | 1267 +++++++---------- eea/climateadapt/browser/scripts.py | 18 +- eea/climateadapt/configure.zcml | 707 +++++----- eea/climateadapt/patches.zcml | 69 + 7 files changed, 2175 insertions(+), 2037 deletions(-) create mode 100644 eea/climateadapt/broken_links.py create mode 100644 eea/climateadapt/broken_links.zcml create mode 100644 eea/climateadapt/patches.zcml diff --git a/eea/climateadapt/broken_links.py b/eea/climateadapt/broken_links.py new file mode 100644 index 000000000..79e1eb56b --- /dev/null +++ b/eea/climateadapt/broken_links.py @@ -0,0 +1,452 @@ +import logging +import re +from collections import defaultdict +from datetime import datetime +from io import BytesIO + +import requests +import transaction +import xlsxwriter +from BeautifulSoup import BeautifulSoup +from DateTime import DateTime +from plone import api +from plone.app.textfield.value import RichTextValue +from plone.dexterity.utils import iterSchemataForType +from plone.restapi.behaviors import IBlocks +from plone.restapi.services import Service +from Products.CMFPlone.utils import getToolByName +from zope.annotation.interfaces import IAnnotations + +from eea.climateadapt.behaviors.aceitem import IAceItem +from eea.climateadapt.restapi.slate import iterate_children + +# from Products.Five.browser import BrowserView +# from plone.api.content import get_state + +logger = logging.getLogger("eea.climateadapt") + + +def convert_to_string(item): + """Convert to string other types""" + + if not item: + return "" + + if not isinstance(item, basestring): + new_item = "" + try: + iterator = iter(item) + except TypeError as err: + value = getattr(item, "raw", None) + + if value: + return value + logger.error(err) + + return "" + else: + for i in iterator: + new_item += i + + return new_item + + return item + + +def discover_links(string_to_search): + """Use regular expressions to get all urls in string""" + # REGEX = re.compile(ur'(?i)\b((?:https?://|www\d{0,3}[.]|[a-z0-9.\-]+[.] 
+ # [a-z]{2,4}/)(?:[^\s()<>]|\(([^\s()<>]+|(\([^\s()<>]+\)))*\))+(?:\(([^\s()<> + # ]+|(\([^\s()<>]+\)))*\)|[^\s`!()\[\]{};:\'\".,<>?\xab\xbb\u201c\u201d\u2018 + # \u2019]))') + REGEX = re.compile( + "http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+" + ) + + try: + result = re.findall(REGEX, string_to_search) or [] + + if isinstance(result, basestring): + result = [result] + except Exception as err: + logger.error(err) + result = [] + + return result + + +def check_link_status(link): + """Check the links and return only the broken ones with the respective + status codes + """ + # return {"status": "404", "url": link} + + if link: + if isinstance(link, unicode): + try: + link = link.encode() + except UnicodeEncodeError: + logger.info("UnicodeEncodeError on link %s", link) + + return {"status": 504, "url": link} + + try: + if link[0:7].find("http") == -1: + link = "http://" + link + except Exception as err: + logger.error(err) + + logger.warning("Now checking: %s", link) + + try: + resp = requests.head(link, timeout=5, allow_redirects=True) + if resp.status_code == 404: + return {"status": "404", "url": link} + # requests.head(link, timeout=5, allow_redirects=True) + except requests.exceptions.ReadTimeout: + return {"status": "504", "url": link} + except requests.exceptions.ConnectTimeout: + logger.info("Timed out.") + logger.info("Trying again with link: %s", link) + try: + requests.head(link, timeout=30, allow_redirects=True) + except: + return {"status": "504", "url": link} + except requests.exceptions.TooManyRedirects: + logger.info("Redirected.") + logger.info("Trying again with link: %s", link) + try: + requests.head(link, timeout=30, allow_redirects=True) + except: + return {"status": "301", "url": link} + except requests.exceptions.URLRequired: + return {"status": "400", "url": link} + except requests.exceptions.ProxyError: + return {"status": "305", "url": link} + except requests.exceptions.HTTPError: + return {"status": "505", "url": link} + except: + return {"status": "404", "url": link} + + return + + +PORTAL_TYPES = [ + "eea.climateadapt.aceproject", + "eea.climateadapt.adaptationoption", + "eea.climateadapt.casestudy", + "eea.climateadapt.guidancedocument", + "eea.climateadapt.indicator", + "eea.climateadapt.informationportal", + "eea.climateadapt.mapgraphdataset", + "eea.climateadapt.organisation", + "eea.climateadapt.publicationreport", + "eea.climateadapt.researchproject", + "eea.climateadapt.tool", + "collective.cover.content", + "Document", + "Folder", +] + + +def extract_websites(obj): + urls = [] + if hasattr(obj, "websites"): + if isinstance(obj.websites, basestring): + lines = obj.websites.split(unicode("\n")) + for line in lines: + if line.strip(): + urls.append(line.strip()) + elif type(obj.websites) is list or type(obj.websites) is tuple: + urls.extend(list(obj.websites)) + + return urls + + +def extract_richtext(obj, fieldname): + urls = [] + field = getattr(obj, fieldname, "") + + if isinstance(field, RichTextValue): + text = field.output + if text: + bs = BeautifulSoup(text) + links = bs.findAll("a", attrs={"href": re.compile("^https?://")}) + urls.extend([link.get("href") for link in links]) + elif isinstance(field, basestring): + urls = discover_links(field) + + return urls + + +def convert_aceitem(obj): + urls = extract_websites(obj) + urls += extract_richtext(obj, "long_description") + urls += extract_richtext(obj, "description") + urls += extract_richtext(obj, "source") + urls += extract_richtext(obj, "comments") + return urls + 
+ +def iterate_blocks(obj): + blocks = getattr(obj, "blocks", None) + layout = getattr(obj, "blocks_layout", {}) + + if not blocks: + raise StopIteration + + if layout: + items = layout.get("items") + for uid in items: + block = blocks[uid] + if block: + yield block + + +def handle_link(node): + url = node.get("data", {}).get("url") + return url + + +SLATE_NODE_HANDLERS = {"link": handle_link} + + +def extract_slate(block): + value = (block or {}).get("value", []) + children = iterate_children(value or []) + urls = [] + + for child in children: + node_type = child.get("type") + if node_type: + handler = SLATE_NODE_HANDLERS.get(node_type) + if handler: + link = handler(child) + if link: + urls.append(link) + + return urls + + +BLOCK_EXTRACTORS = {"slate": extract_slate} + + +def convert_blocks(obj): + urls = [] + for block in iterate_blocks(obj): + if not block: + continue + extractor = BLOCK_EXTRACTORS.get(block.get("@type")) + if extractor: + urls.extend(extractor(block)) + return urls + + +CONVERTORS = { + IAceItem: convert_aceitem, + IBlocks: convert_blocks, +} + + +def recursively_extract_links(site): + """Gets the links for all our items by using the websites field + along with the respective object urls + """ + + catalog = getToolByName(site, "portal_catalog") + types = getToolByName(site, "portal_types").listTypeInfo() + + convertors = defaultdict(list) + for _type in types: + portal_type = _type.getId() + if portal_type not in PORTAL_TYPES: + continue + for schemata in iterSchemataForType(portal_type): + for iface in [schemata] + list(schemata.getBases()): + convertor = CONVERTORS.get(iface) + if convertor: + convertors[portal_type].append(convertor) + + urls = [] + + brains = catalog.searchResults(portal_type=PORTAL_TYPES, path="/cca/en") + count = 0 + logger.info("Got %s objects" % len(brains)) + for b in brains: + obj = b.getObject() + path = obj.getPhysicalPath() + + for convertor in convertors[b.portal_type]: + urls.extend( + [ + {"link": link, "object_url": "/".join(path)} + for link in convertor(obj) + ] + ) + + count += 1 + + if count % 100 == 0: + logger.info("Done %s objects" % count) + + logger.info("Finished getting links.") + + return urls + + +def compute_broken_links(site): + """Script that will get called by cron once per day""" + + results = [] + annot = IAnnotations(site)["broken_links_data"] + links = recursively_extract_links(site) + + for info in links: + res = check_link_status(info["link"]) + if res is not None: + res["object_url"] = info["object_url"] + results.append(res) + + now = DateTime() + annot[now] = results + dates = annot.keys() + + if len(dates) >= 5: # maximum no. 
of dates stored + # delete oldest data except 'pre_nov7_data' + del annot[sorted(dates)[0]] + + IAnnotations(site)._p_changed = True + transaction.commit() + + +class BrokenLinksService(Service): + """Get workflow information""" + + items_to_display = 200 + + # def show_obj(self, path): + # """ Don't show objects which are not published + # """ + # path = '/'.join(path) + # obj = self.context.restrictedTraverse(path) + # state = get_state(obj) + # + # return state == 'published' + + # def url(self, path): + # path = "/".join(path[2:]) + # return path + + def results(self): + portal = api.portal.get() + annot = IAnnotations(portal)["broken_links_data"] + latest_dates = sorted(annot.keys())[-5:] + res = {} + + broken_links = [] + + # __import__("pdb").set_trace() + for date in latest_dates: + for info in annot[date]: + if "en" not in info["object_url"]: + continue + + item = {} + + # try: + # obj = self.context.unrestrictedTraverse(info["object_url"]) + # except: + # continue + # state = get_state(obj) + state = None + if state not in ["private", "archived"]: + if "climate-adapt.eea" in info["url"]: + item["state"] = "internal" + else: + item["state"] = "external" + + item["date"] = date.Date() if isinstance( + date, DateTime) else date + if isinstance(date, str) and date == "pre_nov7_data": + continue + + item["url"] = info["url"] + item["status"] = info["status"] + item["object_url"] = info["object_url"].replace( + "/cca/", "/") + + broken_links.append(item) + + broken_links.sort(key=lambda i: i["date"]) + + for link in broken_links: + res[link["url"]] = link + + # self.chunk_index = int(self.request.form.get("index", 0)) or 0 + # chunks = [] + # + # for i in range(0, len(res), self.items_to_display): + # chunks.append(dict(res.items()[i: i + self.items_to_display])) + # + # return chunks + + return res + + def data_to_xls(self, data): + headers = [ + ("url", "Destination Links"), + ("status", "Status Code"), + ("object_url", "Object Url"), + ("date", "Date"), + ("state", "Type"), + ] + + # Create a workbook and add a worksheet. + out = BytesIO() + workbook = xlsxwriter.Workbook(out, {"in_memory": True}) + + wtitle = "Broken-Links" + worksheet = workbook.add_worksheet(wtitle[:30]) + + for i, (key, title) in enumerate(headers): + worksheet.write(0, i, title or "") + + row_index = 1 + + for chunk in data: + for url, row in chunk.items(): + for i, (key, title) in enumerate(headers): + value = row[key] + worksheet.write(row_index, i, value or "") + + row_index += 1 + + workbook.close() + out.seek(0) + + return out + + def download_as_excel(self): + xlsdata = self.results() + xlsio = self.data_to_xls(xlsdata) + sh = self.request.response.setHeader + + sh( + "Content-Type", + "application/vnd.openxmlformats-officedocument." 
"spreadsheetml.sheet", + ) + fname = "-".join(["Broken-Links", + str(datetime.now().replace(microsecond=0))]) + sh("Content-Disposition", "attachment; filename=%s.xlsx" % fname) + + return xlsio.read() + + def reply(self): + if "download-excel" in self.request.form: + return self.download_as_excel() + + info = { + "@id": self.context.absolute_url() + "/@broken_links", + "broken_links": self.results(), + } + return info diff --git a/eea/climateadapt/broken_links.zcml b/eea/climateadapt/broken_links.zcml new file mode 100644 index 000000000..caa8d94f5 --- /dev/null +++ b/eea/climateadapt/broken_links.zcml @@ -0,0 +1,32 @@ + + + + + + + + + + + + + diff --git a/eea/climateadapt/browser/configure.zcml b/eea/climateadapt/browser/configure.zcml index 268de9ff4..3ecf371f6 100755 --- a/eea/climateadapt/browser/configure.zcml +++ b/eea/climateadapt/browser/configure.zcml @@ -1,97 +1,99 @@ + xmlns="http://namespaces.zope.org/zope" + xmlns:browser="http://namespaces.zope.org/browser" + xmlns:i18n="http://namespaces.zope.org/i18n" + xmlns:z3c="http://namespaces.zope.org/z3c" + i18n_domain="eea.climateadapt" +> - + - + + name="iterate_control" + for="*" + class=".IterateControl" + allowed_attributes="checkin_allowed checkout_allowed cancel_allowed is_checkout" + permission="zope2.View" + layer="eea.climateadapt.interfaces.IEEAClimateAdaptInstalled" + /> + name="eea.climateadapt" + directory="resources" + layer="eea.climateadapt.interfaces.IEEAClimateAdaptInstalled" + /> + name="plone.app.jquerytools.overlayhelpers.js" + file="resources/overlayhelpers.js" + layer="eea.climateadapt.interfaces.IEEAClimateAdaptInstalled" + /> + name="export_to_excel.js" + file="resources/export_to_excel.js" + layer="eea.climateadapt.interfaces.IEEAClimateAdaptInstalled" + /> + widget="z3c.form.interfaces.IOrderedSelectWidget" + template="pt/orderedselect_display.pt" + layer="eea.climateadapt.interfaces.IEEAClimateAdaptInstalled" + mode="display" + /> + menu="plone_displayviews" + for="collective.cover.content.ICover" + > + title="Standard with no title" + action="no_title_cover_view" + /> + title="Columned View" + action="column_view" + /> + title="Vertical Tab View" + action="vertical_tab_view" + /> + menu="plone_displayviews" + for="*" + > + title="Listing with dates" + action="listing-with-dates" + /> + name="dashboard-v2" + for="*" + class=".dashboard.DashboardView" + template="pt/dashboard.pt" + permission="zope2.View" + /> + name="case-studies-map.arcgis.json" + for="*" + class=".casestudies_map.Items" + permission="zope2.View" + /> - + name="observatory_indicators_list" + for="*" + class=".observatory_indicators.ObservatoryIndicators" + template="pt/observatory_indicators_search.pt" + permission="zope2.View" + /> + name="case-studies-plotly" + for="*" + template="pt/case_study_plotly.pt" + permission="zope2.View" + /> + name="case-studies-plotly.json" + for="*" + class=".casestudies_network.Plotly" + permission="zope2.View" + /> + name="c3sindicator-test" + for="*" + template="pt/c3sindicator_test.pt" + permission="zope2.View" + /> + name="urban-landing-page" + for="*" + template="pt/landing_page_urban.pt" + permission="zope2.View" + class=".landing_page.Urban" + /> + name="forest-landing-page" + for="*" + template="pt/landing_page_forest.pt" + permission="zope2.View" + class=".landing_page.Forest" + /> - - + name="case-study-and-adaptation-options-map-viewer" + for="*" + class=".casestudies_map.Page" + template="pt/case-study-and-adaptation-options-map-viewer.pt" + permission="zope2.View" + /> + 
name="listing-with-dates" + for="*" + template="pt/listing-with-dates.pt" + permission="zope2.View" + /> + name="ace_macros" + for="*" + template="pt/ace_macros.pt" + permission="zope.Public" + /> + name="calculate-item-statistics" + for="*" + class=".misc.CalculateItemStatistics" + permission="cmf.ModifyPortalContent" + /> + name="write-macrotrans-regions" + for="*" + class=".misc.GetItemsForMacrotransRegions" + permission="cmf.ModifyPortalContent" + /> + name="clear-macrotrans-regions" + for="*" + class=".misc.ClearMacrotransnationalRegions" + permission="cmf.ModifyPortalContent" + /> + name="get-item-statistics" + for="*" + class=".misc.getItemStatistics" + template="pt/item_statistics.pt" + permission="zope2.View" + /> - + + + + + + + + + name="column_view" + for="*" + class=".CoverNoTitleView" + template="pt/cover_column_view.pt" + permission="zope2.View" + /> + name="vertical_tab_view" + for="*" + class=".CoverNoTitleView" + template="pt/vertical_tab_view.pt" + permission="zope2.View" + /> + name="view" + for="eea.climateadapt.aceitem.IIndicator" + class="eea.climateadapt.browser.aceitem.IndicatorView" + template="pt/indicator_view.pt" + permission="zope2.View" + /> - + name="view" + for="eea.climateadapt.aceitem.IC3sIndicator" + class="eea.climateadapt.browser.aceitem.C3sIndicatorView" + template="pt/c3sindicator_view.pt" + permission="zope2.View" + /> - + name="view" + for="eea.climateadapt.aceitem.IPublicationReport" + class="eea.climateadapt.browser.aceitem.PublicationReportView" + template="pt/publicationreport_view.pt" + permission="zope2.View" + /> + name="view" + for="eea.climateadapt.aceitem.IInformationPortal" + class="eea.climateadapt.browser.aceitem.InformationPortalView" + template="pt/informationportal_view.pt" + permission="zope2.View" + /> + name="view" + for="eea.climateadapt.aceitem.IGuidanceDocument" + class="eea.climateadapt.browser.aceitem.GuidanceDocumentView" + template="pt/guidancedocument_view.pt" + permission="zope2.View" + /> + name="view" + for="eea.climateadapt.aceitem.ITool" + class="eea.climateadapt.browser.aceitem.ToolView" + template="pt/tool_view.pt" + permission="zope2.View" + /> + name="view" + for="eea.climateadapt.aceitem.IMapGraphDataset" + class="eea.climateadapt.browser.mapgraphsdataset.MapGraphDatasetView" + template="pt/mapgraphdataset_view.pt" + permission="zope2.View" + /> + name="view" + for="eea.climateadapt.aceitem.IOrganisation" + class="eea.climateadapt.browser.aceitem.OrganisationView" + template="pt/organisation_view.pt" + permission="zope2.View" + /> + name="view" + for="eea.climateadapt.acemeasure.ICaseStudy" + class=".casestudy.CaseStudyView" + template="pt/casestudy_view.pt" + permission="zope2.View" + /> + name=".json" + for="eea.climateadapt.acemeasure.ICaseStudy" + class=".casestudy.CaseStudyJson" + permission="zope2.View" + /> - -----[ Form Tabs customizations, exclude blocks layout ]---- + name="view_cca_event" + for="plone.event.interfaces.IEvent" + class="eea.climateadapt.browser.cca_event.CcaEvent" + template="pt/ccaevent_view.pt" + permission="zope2.View" + /> -----[ Form Tabs customizations, exclude blocks layout ]---- + /> + /> - - -----[ Case Study Form customizations ]---- - - + /> + -----[ Case Study Form customizations ]---- + /> - - -----[ Adaptation Option Form customizations ]---- - - + /> + -----[ Adaptation Option Form customizations ]---- + /> - - -----[ Maps Graphs Form customizations ]---- - - + /> + -----[ Maps Graphs Form customizations ]---- + /> - - -----[ Videos Form customizations ]---- - - + /> + 
-----[ Videos Form customizations ]---- + /> - - -----[ Publication Reports Form customizations ]---- - - + /> + -----[ Publication Reports Form customizations ]---- + /> - - -----[ Information Portals Form customizations ]---- - - + /> + -----[ Information Portals Form customizations ]---- + /> - - -----[ Maps Graphs Form customizations ]---- - - + /> + -----[ Maps Graphs Form customizations ]---- + /> - - -----[ Tools Form customizations ]---- - - + /> + -----[ Tools Form customizations ]---- + /> - - -----[ Indicators Form customizations ]---- - - + /> + -----[ Indicators Form customizations ]---- + /> + /> - - -----[ C3sIndicators Form customizations ]---- - - + /> + -----[ C3sIndicators Form customizations ]---- + /> + /> - - -----[ Organisations Form customizations ]---- - - + /> + -----[ Organisations Form customizations ]---- + /> - - -----[ Ace Project Form customizations ]---- - - + /> + -----[ Ace Project Form customizations ]---- + /> - - ------ - - + /> + ------ + name="view" + for="eea.climateadapt.acemeasure.IAdaptationOption" + class="eea.climateadapt.browser.adaptationoption.AdaptationOptionView" + template="pt/adaptationoption_view.pt" + permission="zope2.View" + /> + name="view" + for="eea.climateadapt.aceproject.IAceProject" + class="eea.climateadapt.browser.aceproject.AceProjectView" + template="pt/aceproject_view.pt" + permission="zope2.View" + /> + name="site_navbar" + for="*" + class=".site.Navbar" + template="pt/navbar.pt" + permission="zope.Public" + /> + name="footer_navbar" + for="*" + template="pt/navbar_footer.pt" + permission="zope.Public" + class=".site.Navbar" + /> + name="health_navbar" + for="*" + class=".health_menu.Navbar" + template="pt/navbar.pt" + permission="zope.Public" + /> + name="help-nav" + for="*" + class=".site.Navbar" + template="pt/help-nav.pt" + permission="zope.Public" + /> + name="footer_logos" + for="*" + template="pt/footer_logos.pt" + permission="zope.Public" + /> + name="viewaceitem" + for="Products.CMFPlone.interfaces.siteroot.IPloneSiteRoot" + class=".ViewAceItem" + permission="zope.Public" + /> + name="viewmeasure" + for="Products.CMFPlone.interfaces.siteroot.IPloneSiteRoot" + class=".ViewAceMeasure" + permission="zope.Public" + /> + name="projects1" + for="Products.CMFPlone.interfaces.siteroot.IPloneSiteRoot" + class=".ViewAceProject" + permission="zope.Public" + /> + name="guest;jsessionid=607E735A83C3649F5D76F1CEF2441501" + for="*" + class=".misc.NewsletterRedirect" + permission="zope.Public" + /> + name="web" + for="*" + class=".misc.WebEmptyView" + permission="cmf.ManagePortal" + /> - + + + + + + - + + + + + + - + + + + + - - - - - + + + + + + + + name="countries-context-pagelet" + for="*" + class=".countries.ContextCountriesView" + template="pt/countries-context-pagelet.pt" + permission="zope2.View" + /> + name="countries-heat-index" + for="*" + class=".countries.ContextCountriesView" + template="pt/countries-heat-index.pt" + permission="zope2.View" + /> + name="countries-selector" + for="*" + class=".countries.ContextCountriesView" + template="pt/countries-selector.pt" + permission="zope2.View" + /> + name="countries-heat-index-json" + for="*" + class=".countries.ContextCountriesViewJson" + permission="zope2.View" + /> + name="countries-list" + for="*" + class=".countries.ContextCountriesView" + template="pt/countries-list.pt" + permission="zope2.View" + /> + name="adaptation-strategies" + for="*" + class=".misc.AdaptationStrategyView" + permission="zope2.View" + /> + name="map-viewer" + for="*" + 
class=".misc.MapViewerView" + permission="zope2.View" + /> + name="redirect_to_search_page" + for="*" + class=".misc.RedirectToSearchView" + permission="zope2.View" + /> + name="transnational-regions-view" + for="*" + class=".misc.TransRegionView" + template="pt/transnational_regions.pt" + permission="zope2.View" + /> + name="export-excel" + for="*" + class=".misc.ExcelCsvExportView" + template="pt/export_portaltypes.pt" + permission="zope2.View" + /> + name="fix-checkout" + for="*" + class=".misc.FixCheckout" + permission="cmf.ManagePortal" + /> + name="help-categories" + for="*" + template="pt/help-categories.pt" + permission="zope2.View" + /> + name="country-disclaimer" + for="*" + template="pt/country_disclaimer.pt" + permission="zope2.View" + /> + name="eu-sector-policies" + for="*" + template="pt/eu-sector-policies.pt" + permission="zope2.View" + /> + name="video-thumbs" + for="*" + template="pt/video_thumbs.pt" + permission="zope2.View" + /> + name="regions-section-select" + for="*" + class=".regions_select.TransRegionSelect" + template="pt/regions-section-select.pt" + permission="zope2.View" + /> + name="regions-section" + for="*" + class=".regions_select.TransRegionSelect" + template="pt/regions-section.pt" + permission="zope2.View" + /> + name="acecontent_search_helper" + for="*" + class="eea.climateadapt.browser.tilehelpers.AceContentSearch" + template="pt/helper_acecontentsearch.pt" + permission="zope2.View" + /> + name="view_last_modified" + for="*" + class="eea.climateadapt.browser.tilehelpers.LastUpdateTile" + template="pt/last_update.pt" + permission="zope2.View" + /> + name="baltic_region_menu" + for="eea.climateadapt.interfaces.IBalticRegionMarker" + manager="plone.app.layout.viewlets.interfaces.IAboveContentBody" + template="pt/viewlet_transregion_menu.pt" + permission="zope2.View" + /> + name="share_page_sub_menu" + for="eea.climateadapt.interfaces.IClimateAdaptSharePage" + manager="plone.app.layout.viewlets.interfaces.IAboveContentBody" + class=".viewlets.SharePageSubMenuViewlet" + permission="zope2.View" + /> + name="policy_sector_page_sub_menu" + for="plone.dexterity.interfaces.IDexterityItem" + manager="plone.app.layout.viewlets.interfaces.IAboveContentBody" + class=".viewlets.PolicySectorPageSubMenuViewlet" + permission="zope2.View" + /> + name="urbanast_bottom_nav" + for="*" + template="pt/viewlet_urbanast_bottom_nav.pt" + permission="zope2.View" + /> + name="plone.belowcontentbody.relateditems" + manager="plone.app.layout.viewlets.interfaces.IAboveContentBody" + class=".viewlets.RelatedItemsViewlet" + template="pt/viewlet_related_items.pt" + permission="zope2.View" + layer="eea.climateadapt.interfaces.IEEAClimateAdaptInstalled" + /> + name="plone.belowcontentbody.relateditems" + manager="plone.app.layout.viewlets.interfaces.IBelowContentBody" + class=".viewlets.RelatedItemsViewlet" + template="pt/viewlet_related_items.pt" + permission="zope2.View" + layer="eea.climateadapt.interfaces.IEEAClimateAdaptInstalled" + /> + name="plone.searchbox" + manager="plone.app.layout.viewlets.interfaces.IPortalHeader" + class=".viewlets.SearchBoxViewlet" + permission="zope2.View" + layer="eea.climateadapt.interfaces.IEEAClimateAdaptInstalled" + /> + name="plone.resourceregistries" + view=".misc.ISimplifiedResourceRegistriesView" + manager="plone.app.layout.viewlets.interfaces.IHtmlHead" + template="pt/resourceregistries.pt" + permission="zope2.View" + layer="eea.climateadapt.interfaces.IEEAClimateAdaptInstalled" + /> + name="plone.path_bar" + 
manager="plone.app.layout.viewlets.interfaces.IAboveContent" + class=".viewlets.PathBarViewlet" + template="pt/breadcrumbs.pt" + permission="zope2.View" + layer="eea.climateadapt.interfaces.IEEAClimateAdaptInstalled" + /> + name="archived-state" + manager="plone.app.layout.viewlets.interfaces.IBelowContentTitle" + class=".viewlets.ArchivedStateViewlet" + permission="zope2.View" + layer="eea.climateadapt.interfaces.IEEAClimateAdaptInstalled" + /> + name="tlspu.cookiepolicy" + manager="plone.app.layout.viewlets.interfaces.IBelowContent" + class=".viewlets.CookiesViewlet" + permission="zope2.View" + layer="eea.climateadapt.interfaces.IEEAClimateAdaptInstalled" + /> + name="plone.personal_bar" + manager="plone.app.layout.viewlets.interfaces.IPortalHeader" + class=".viewlets.CustomizedPersonalBarViewlet" + permission="zope2.View" + layer="eea.climateadapt.interfaces.IEEAClimateAdaptInstalled" + /> + name="casestudies.json" + for="Products.CMFPlone.interfaces.siteroot.IPloneSiteRoot" + class=".casestudy.CaseStudiesJson" + permission="zope2.View" + /> + name="casestudies_xml" + for="Products.CMFPlone.interfaces.siteroot.IPloneSiteRoot" + class=".casestudy.CaseStudiesXML" + permission="zope2.View" + /> + name="country-metadata-extract" + for="*" + class=".countries.CountryMetadataExtract" + permission="cmf.ManagePortal" + /> + name="countries-metadata-extract" + for="*" + class=".countries.CountriesMetadataExtract" + permission="zope2.View" + /> + name="countries-view-map-d3" + for="*" + class=".countries.CountriesD3View" + template="pt/countries-d3.pt" + permission="zope2.View" + /> + name="external-template-head" + for="*" + template="pt/search/external-template-head.pt" + permission="zope2.View" + /> + name="external-template-header" + for="*" + class=".externaltemplates.ExternalTemplateHeader" + template="pt/search/external-template-header.pt" + permission="zope2.View" + /> + name="external-template-footer" + for="*" + template="pt/search/external-template-footer.pt" + permission="zope2.View" + /> @@ -1159,81 +1111,78 @@ - - + handler=".misc.preventFolderDeletionEvent" + /> + name="fullwidth_content_types" + for="*" + class=".main.FullWidthContentTypes" + permission="zope.Public" + /> + name="ast.pdf" + for="eea.pdf.interfaces.IPDFAware" + class="eea.pdf.browser.app.download.Download" + permission="eea.pdf.download" + layer="eea.pdf.interfaces.ILayer" + /> + name="case_study_contacts.csv" + for="*" + class=".download.CaseStudiesCSV" + permission="cmf.ManagePortal" + /> + name="keywords_tags.csv" + for="*" + class=".download.KeywordsTagsCSV" + permission="cmf.ManagePortal" + /> - Special pages + name="health_homepage" + for="*" + class=".health.HealthHomepageItems" + template="pt/health_homepage.pt" + permission="zope2.View" + /> Special pages + name="vibriomap-old-view" + for="*" + template="pt/vibriomap-old.pt" + permission="zope2.View" + /> + name="vibriomap-proxy" + for="*" + permission="zope2.View" + class=".misc.VibrioProxy" + /> + name="vibriomap-view-simple" + for="*" + template="pt/vibriomap-simple.pt" + permission="zope2.View" + /> + name="lancet-indicator-view" + for="*" + template="pt/lancet-indicator.pt" + permission="zope2.View" + /> diff --git a/eea/climateadapt/browser/misc.py b/eea/climateadapt/browser/misc.py index bc4f311db..d49b35fe1 100644 --- a/eea/climateadapt/browser/misc.py +++ b/eea/climateadapt/browser/misc.py @@ -1,81 +1,72 @@ import json import logging -import re import urllib -from datetime import datetime -from email.MIMEText import MIMEText -from io import 
BytesIO from itertools import islice import requests import transaction -import xlsxwriter -from BeautifulSoup import BeautifulSoup -from DateTime import DateTime from dateutil.tz import gettz -from eea.climateadapt.config import CONTACT_MAIL_LIST -from eea.climateadapt.schema import Email -from eea.climateadapt.translation.utils import ( - filters_to_query, - get_current_language, -) from OFS.ObjectManager import BeforeDeleteException from plone import api from plone.api import portal -from plone.api.content import get_state -from plone.api.portal import show_message from plone.app.iterate.interfaces import ICheckinCheckoutPolicy from plone.app.widgets.dx import DatetimeWidgetConverter as BaseConverter -from plone.directives import form -from plone.formwidget.captcha.validator import (CaptchaValidator, - WrongCaptchaCode) -from plone.formwidget.captcha.widget import CaptchaFieldWidget from plone.memoize import view -from plone.z3cform.layout import wrap_form from Products.CMFPlone.utils import getToolByName, isExpired from Products.Five.browser import BrowserView -from Products.statusmessages.interfaces import IStatusMessage -from z3c.form import button, field, validator -from ZODB.PersistentMapping import PersistentMapping -from zope import schema from zope.annotation.interfaces import IAnnotations from zope.component import getMultiAdapter from zope.interface import Interface, implements +from eea.climateadapt.translation.utils import ( + filters_to_query, + get_current_language, +) + +# from plone.api.portal import show_message +# from plone.directives import form +# from plone.formwidget.captcha.validator import CaptchaValidator, WrongCaptchaCode +# from plone.formwidget.captcha.widget import CaptchaFieldWidget +# from plone.z3cform.layout import wrap_form +# from Products.statusmessages.interfaces import IStatusMessage +# from z3c.form import button, field, validator +# from zope import schema +# from eea.climateadapt.config import CONTACT_MAIL_LIST +# from eea.climateadapt.schema import Email -logger = logging.getLogger('eea.climateadapt') +logger = logging.getLogger("eea.climateadapt") class Captcha(object): - subject = u"" - captcha = u"" + subject = "" + captcha = "" def __init__(self, context): self.context = context class NewsletterRedirect(BrowserView): - """ Redirect to newsletter #84251""" + """Redirect to newsletter #84251""" def __call__(self): - return self.request.response.redirect('/newsletter') + return self.request.response.redirect("/newsletter") class WebEmptyView(BrowserView): - """ Empty view for /web #84251""" + """Empty view for /web #84251""" def __call__(self): - return self.request.response.redirect('/newsletter') + return self.request.response.redirect("/newsletter") class CalculateItemStatistics(BrowserView): - """ Performs a catalog search for the portal types defined in the search() - After visiting the view /calculate-item-statistics it initializes - IAnnotations(site) -> performs the catalog search and saves the - results to IAnnotations(site) + """Performs a catalog search for the portal types defined in the search() + After visiting the view /calculate-item-statistics it initializes + IAnnotations(site) -> performs the catalog search and saves the + results to IAnnotations(site) - 'Total' refers to the number of total items, regardless of their review - state (published/private/sent/pending/etc) + 'Total' refers to the number of total items, regardless of their review + state (published/private/sent/pending/etc) """ def __call__(self): @@ -87,53 
+78,55 @@ def initialize(self): self.cleanUpData() def initializeAnnotations(self): - """ Initializing Annotations """ - logger.info('Initializing Annotations') + """Initializing Annotations""" + logger.info("Initializing Annotations") annot = IAnnotations(self.context) - annot['cca-item-statistics'] = {} - types = getToolByName(self.context, 'portal_types').listContentTypes() + annot["cca-item-statistics"] = {} + types = getToolByName(self.context, "portal_types").listContentTypes() for year in range(1969, 2018): annotation = {} for ctype in types: - annotation[ctype] = {'published': 0, 'total': 0} - annot['cca-item-statistics'][year] = annotation + annotation[ctype] = {"published": 0, "total": 0} + annot["cca-item-statistics"][year] = annotation logger.info("Finished Initializing Annotations") def search(self): - """ Catalog search for all content types used """ + """Catalog search for all content types used""" logger.info("Starting the catalog search") catalog = self.context.portal_catalog - query = {'portal_type': [ - 'eea.climateadapt.aceproject', - 'eea.climateadapt.tool', - 'eea.climateadapt.researchproject', - 'eea.climateadapt.publicationreport', - 'eea.climateadapt.organisation', - 'eea.climateadapt.mapgraphdataset', - 'eea.climateadapt.informationportal', - 'eea.climateadapt.indicator', - 'eea.climateadapt.guidancedocument', - 'eea.climateadapt.casestudy', - 'eea.climateadapt.adaptationoption', - 'Link', - 'Document', - 'News Item', - 'Event', - 'collective.cover.content', - 'Folder', - 'EasyForm', - 'Collection'] - } + query = { + "portal_type": [ + "eea.climateadapt.aceproject", + "eea.climateadapt.tool", + "eea.climateadapt.researchproject", + "eea.climateadapt.publicationreport", + "eea.climateadapt.organisation", + "eea.climateadapt.mapgraphdataset", + "eea.climateadapt.informationportal", + "eea.climateadapt.indicator", + "eea.climateadapt.guidancedocument", + "eea.climateadapt.casestudy", + "eea.climateadapt.adaptationoption", + "Link", + "Document", + "News Item", + "Event", + "collective.cover.content", + "Folder", + "EasyForm", + "Collection", + ] + } brains = catalog.searchResults(**query) - logger.info('Got %s results.' % len(brains)) + logger.info("Got %s results." 
% len(brains)) items_count = 0 for brain in brains: if items_count % 100 == 0: - logger.info('Went through %s brains' % items_count) + logger.info("Went through %s brains" % items_count) obj = brain.getObject() obj_state = api.content.get_state(obj) creation_year = obj.created().year() @@ -148,7 +141,7 @@ def search(self): self.saveToAnnotations(creation_year, portal_type, False) - if obj_state == 'published': + if obj_state == "published": publish_year = obj.effective().year() if publish_year is None: @@ -157,52 +150,53 @@ def search(self): continue self.saveToAnnotations(publish_year, portal_type, True) items_count += 1 - logger.info('Finished the search.') + logger.info("Finished the search.") def saveToAnnotations(self, year, content_type, published): - """ Saves the number of brains depending on its review state """ - annotations = IAnnotations(self.context)['cca-item-statistics'] + """Saves the number of brains depending on its review state""" + annotations = IAnnotations(self.context)["cca-item-statistics"] if published: - annotations[year][content_type]['published'] += 1 - annotations[year][content_type]['total'] += 1 + annotations[year][content_type]["published"] += 1 + annotations[year][content_type]["total"] += 1 def cleanUpData(self): - """ Cleans up all the unnecessary indexes """ - logger.info('Cleaning up DATA') + """Cleans up all the unnecessary indexes""" + logger.info("Cleaning up DATA") for year in range(1969, 2018): annot = IAnnotations(self.context) - annotation = annot['cca-item-statistics'][year] + annotation = annot["cca-item-statistics"][year] keys = annotation.keys() for key in keys: - if annotation[key]['total'] == 0: + if annotation[key]["total"] == 0: annotation.pop(key, None) keys = annotation.keys() if len(keys) == 0: - IAnnotations(self.context)['cca-item-statistics'].pop(year) + IAnnotations(self.context)["cca-item-statistics"].pop(year) continue - logger.info('Finished cleaning up data') + logger.info("Finished cleaning up data") class getItemStatistics(BrowserView): - """ BrowserView used in order to display the total number of brains present - on the site in each year + """BrowserView used in order to display the total number of brains present + on the site in each year - path: site/@@get-item-statistics + path: site/@@get-item-statistics """ def __call__(self): return self.index() def get_portal_types(self, year): - """ Filters out the portal types """ - all_types = [{xx[0]: xx[1].title} - for xx in self.context.portal_types.objectItems()] - annotations = IAnnotations(self.context)['cca-item-statistics'] + """Filters out the portal types""" + all_types = [ + {xx[0]: xx[1].title} for xx in self.context.portal_types.objectItems() + ] + annotations = IAnnotations(self.context)["cca-item-statistics"] types = [] @@ -213,27 +207,27 @@ def get_portal_types(self, year): return types def get_years(self): - """ Gets the years present in IAnnotations and sorts them ascending """ - years = IAnnotations(self.context)['cca-item-statistics'].keys() + """Gets the years present in IAnnotations and sorts them ascending""" + years = IAnnotations(self.context)["cca-item-statistics"].keys() years.sort() return years def get_published(self, year, portal_type): - """ Gets the number of published items depending on year/portal_type""" - annotations = IAnnotations(self.context)['cca-item-statistics'] + """Gets the number of published items depending on year/portal_type""" + annotations = IAnnotations(self.context)["cca-item-statistics"] - return 
annotations[year][portal_type]['published'] + return annotations[year][portal_type]["published"] def get_total(self, year, portal_type): - """ Gets the number of total items depending on year/portal_type """ - annotations = IAnnotations(self.context)['cca-item-statistics'] + """Gets the number of total items depending on year/portal_type""" + annotations = IAnnotations(self.context)["cca-item-statistics"] - return annotations[year][portal_type]['total'] + return annotations[year][portal_type]["total"] class FixCheckout(BrowserView): - """ A view to fix getBaseline error when the original item was deleted + """A view to fix getBaseline error when the original item was deleted and only the copy remains. """ @@ -247,43 +241,44 @@ def __call__(self): class ISimplifiedResourceRegistriesView(Interface): - """ A view with simplified resource registries """ + """A view with simplified resource registries""" class TransRegionView(BrowserView): - """ Custom view for /transnational-regions """ + """Custom view for /transnational-regions""" implements(ISimplifiedResourceRegistriesView) -class CountriesView (BrowserView): - """ Custom view for http://climate-adapt.eea.europa.eu/countries """ +class CountriesView(BrowserView): + """Custom view for http://climate-adapt.eea.europa.eu/countries""" implements(ISimplifiedResourceRegistriesView) -class MapViewerView (BrowserView): - """ Custom view for http://climate-adapt.eea.europa.eu/tools/map-viewer """ +class MapViewerView(BrowserView): + """Custom view for http://climate-adapt.eea.europa.eu/tools/map-viewer""" implements(ISimplifiedResourceRegistriesView) def __call__(self): - return self.request.response.redirect('/tools/map-viewer?' + - self.request['QUERY_STRING']) + return self.request.response.redirect( + "/tools/map-viewer?" 
+ self.request["QUERY_STRING"] + ) -class AdaptationStrategyView (BrowserView): - """ Redirect for http://climate-adapt.eea.europa.eu/adaptation-strategies - to /countries-view-map +class AdaptationStrategyView(BrowserView): + """Redirect for http://climate-adapt.eea.europa.eu/adaptation-strategies + to /countries-view-map """ @view.memoize def __call__(self): - return self.request.response.redirect('/countries') + return self.request.response.redirect("/countries") -class RedirectToSearchView (BrowserView): - """ Custom view for /content """ +class RedirectToSearchView(BrowserView): + """Custom view for /content""" def __init__(self, context, request): # Each view instance receives context and request as construction parameters @@ -292,234 +287,130 @@ def __init__(self, context, request): def __call__(self): current_language = get_current_language(self.context, self.request) - portal_state = getMultiAdapter((self.context, self.request), - name=u'plone_portal_state') + portal_state = getMultiAdapter( + (self.context, self.request), name="plone_portal_state" + ) - typeOfDataTo = self.request.other['ACTUAL_URL'].split('/')[-1] + typeOfDataTo = self.request.other["ACTUAL_URL"].split("/")[-1] typeOfDataValues = { - 'adaptation-options': 'Adaptation options', - 'case-studies': 'Case studies', - 'indicators': 'Indicators', - 'portals': 'Information portals', - 'guidances': 'Guidance', - 'organisations': 'Organisations', - 'publications': 'Publications and reports', - 'projects': 'Research and knowledge projects', - 'tools': 'Tools', - 'videos': 'Videos', + "adaptation-options": "Adaptation options", + "case-studies": "Case studies", + "indicators": "Indicators", + "portals": "Information portals", + "guidances": "Guidance", + "organisations": "Organisations", + "publications": "Publications and reports", + "projects": "Research and knowledge projects", + "tools": "Tools", + "videos": "Videos", } navigation_root_url = portal_state.navigation_root_url() - if '/observatory' in navigation_root_url: - link = '/'+current_language+'/observatory/catalogue/' + if "/observatory" in navigation_root_url: + link = "/" + current_language + "/observatory/catalogue/" else: - link = '/'+current_language+'/data-and-downloads/' - - if link == '/'+current_language+'/observatory/catalogue/' and typeOfDataTo == 'organisations': - link = '/'+current_language+'/observatory/About/about-the-observatory#partners' + link = "/" + current_language + "/data-and-downloads/" + + if ( + link == "/" + current_language + "/observatory/catalogue/" + and typeOfDataTo == "organisations" + ): + link = ( + "/" + + current_language + + "/observatory/About/about-the-observatory#partners" + ) else: - querystring = self.request.form.get('SearchableText', "") + querystring = self.request.form.get("SearchableText", "") query = { - u'display_type': u'list', - u'highlight': { - u'fields': { - u'*': { - } + "display_type": "list", + "highlight": {"fields": {"*": {}}}, + "query": { + "bool": { + "must": [ + {"term": {"hasWorkflowState": "published"}}, + { + "query_string": { + "analyze_wildcard": True, + "default_operator": "OR", + "query": querystring, + } + }, + ] } }, - u'query': { - u'bool': { - u'must': - [{u'term': {u'hasWorkflowState': u'published'}}, - {u'query_string': {u'analyze_wildcard': True, - u'default_operator': u'OR', - u'query': querystring} - }] - } - } } if typeOfDataTo in typeOfDataValues: - query['query']['bool']['filter'] = { - "bool": {"should": [{"term": {"typeOfData": typeOfDataValues[typeOfDataTo]}}]}} + 
query["query"]["bool"]["filter"] = { + "bool": { + "should": [ + {"term": { + "typeOfData": typeOfDataValues[typeOfDataTo]}} + ] + } + } - link = link + '?source=' + \ - urllib.quote(json.dumps(query))+'&lang='+current_language + link = ( + link + + "?source=" + + urllib.quote(json.dumps(query)) + + "&lang=" + + current_language + ) return self.request.response.redirect(link) -class ExcelCsvExportView (BrowserView): - """ View with links to the excel export for portal types """ - - -class DetectBrokenLinksView (BrowserView): - """ View for detecting broken links""" - - items_to_display = 200 - - # def show_obj(self, path): - # """ Don't show objects which are not published - # """ - # path = '/'.join(path) - # obj = self.context.restrictedTraverse(path) - # state = get_state(obj) - # - # return state == 'published' - - def url(self, path): - path = '/'.join(path[2:]) - return path - - def results(self): - portal = api.portal.get() - annot = IAnnotations(portal)['broken_links_data'] - latest_dates = sorted(annot.keys())[-5:] - res = {} - - broken_links = [] - - for date in latest_dates: - for info in annot[date]: - if 'en' not in info['object_url']: - continue - - item = {} - - try: - obj = self.context.unrestrictedTraverse(info['object_url']) - except: - continue - - state = get_state(obj) - if state not in ['private', 'archived']: - if 'climate-adapt.eea' in info['url']: - item['state'] = 'internal' - else: - item['state'] = 'external' - - item['date'] = date.Date() if isinstance( - date, DateTime) else date - if (isinstance(date, str) and date == 'pre_nov7_data'): - continue - - item['url'] = info['url'] - item['status'] = info['status'] - item['object_url'] = self.url(info['object_url']) - - broken_links.append(item) - - broken_links.sort(key=lambda i: i['date']) - - for link in broken_links: - res[link['url']] = link - - self.chunk_index = int(self.request.form.get('index', 0)) or 0 - chunks = [] - - for i in range(0, len(res), self.items_to_display): - chunks.append(dict(res.items()[i:i + self.items_to_display])) - - return chunks - - def data_to_xls(self, data): - headers = [ - ('url', 'Destination Links'), - ('status', 'Status Code'), - ('object_url', 'Object Url'), - ('date', 'Date'), - ('state', 'Type') - ] - - # Create a workbook and add a worksheet. - out = BytesIO() - workbook = xlsxwriter.Workbook(out, {'in_memory': True}) - - wtitle = 'Broken-Links' - worksheet = workbook.add_worksheet(wtitle[:30]) - - for i, (key, title) in enumerate(headers): - worksheet.write(0, i, title or '') - - row_index = 1 - - for chunk in data: - for url, row in chunk.items(): - for i, (key, title) in enumerate(headers): - value = row[key] - worksheet.write(row_index, i, value or '') +class ExcelCsvExportView(BrowserView): + """View with links to the excel export for portal types""" - row_index += 1 - workbook.close() - out.seek(0) - - return out - - def download_as_excel(self): - xlsdata = self.results() - xlsio = self.data_to_xls(xlsdata) - sh = self.request.response.setHeader - - sh('Content-Type', 'application/vnd.openxmlformats-officedocument.' 
- 'spreadsheetml.sheet') - fname = "-".join(["Broken-Links", - str(datetime.now().replace(microsecond=0))]) - sh('Content-Disposition', - 'attachment; filename=%s.xlsx' % fname) - - return xlsio.read() - - def __call__(self): - if 'download-excel' in self.request.form: - return self.download_as_excel() - - return self.index() - - -class ClearMacrotransnationalRegions (BrowserView): - """ Clear the macrotransnational regions from geographic localization +class ClearMacrotransnationalRegions(BrowserView): + """Clear the macrotransnational regions from geographic localization if all the regions are selected """ def __call__(self): return - logger.info('Starting to clear regions.') + logger.info("Starting to clear regions.") for brain in self.catalog_search(): self.clear_regions(brain.getObject()) - logger.info('Finished clearing regions.') + logger.info("Finished clearing regions.") def catalog_search(self): catalog = self.context.portal_catalog - query = {'portal_type': [ - 'eea.climateadapt.aceproject', - 'eea.climateadapt.adaptationoption', - 'eea.climateadapt.casestudy', - 'eea.climateadapt.guidancedocument', - 'eea.climateadapt.indicator', - 'eea.climateadapt.informationportal', - 'eea.climateadapt.mapgraphdataset', - 'eea.climateadapt.organisation', - 'eea.climateadapt.publicationreport', - 'eea.climateadapt.researchproject', - 'eea.climateadapt.tool', - ]} + query = { + "portal_type": [ + "eea.climateadapt.aceproject", + "eea.climateadapt.adaptationoption", + "eea.climateadapt.casestudy", + "eea.climateadapt.guidancedocument", + "eea.climateadapt.indicator", + "eea.climateadapt.informationportal", + "eea.climateadapt.mapgraphdataset", + "eea.climateadapt.organisation", + "eea.climateadapt.publicationreport", + "eea.climateadapt.researchproject", + "eea.climateadapt.tool", + ] + } brains = catalog.searchResults(**query) return brains def clear_regions(self, obj): - if obj.geochars in [None, u'', '', []]: + if obj.geochars in [None, "", "", []]: return geochars = json.loads(obj.geochars) - macro = geochars['geoElements'].get('macrotrans', []) + macro = geochars["geoElements"].get("macrotrans", []) if macro: if len(macro) == 13: - logger.info('Clearing regions on %s' % obj.absolute_url()) - geochars['geoElements']['macrotrans'] = [] + logger.info("Clearing regions on %s" % obj.absolute_url()) + geochars["geoElements"]["macrotrans"] = [] geochars = json.dumps(geochars).encode() obj.geochars = geochars obj._p_changed = True @@ -527,7 +418,7 @@ def clear_regions(self, obj): class GetItemsForMacrotransRegions(BrowserView): - """ Write to files the url of objects belonging to either the caribbean + """Write to files the url of objects belonging to either the caribbean or se-europe region NOTE: this is one time use only view @@ -539,44 +430,44 @@ def __call__(self): for b in self.catalog_search(): obj = b.getObject() - if obj.geochars in [None, u'', '', []]: + if obj.geochars in [None, "", "", []]: continue geochars = json.loads(obj.geochars) - macro = geochars['geoElements'].get('macrotrans', []) + macro = geochars["geoElements"].get("macrotrans", []) if macro: - if 'TRANS_MACRO_CAR_AREA' in macro: + if "TRANS_MACRO_CAR_AREA" in macro: self.write_caribbean(obj) - if 'TRANS_MACRO_SE_EUR' in macro: + if "TRANS_MACRO_SE_EUR" in macro: self.write_se_europe(obj) - logger.info('Completed writing to files.') + logger.info("Completed writing to files.") def write_caribbean(self, obj): - logger.info('Writing %s to CARIBBEAN' % obj.absolute_url()) - with open('/'.join(['/tmp/', 'caribbean']), 'a') as f: 
- f.writelines('Object URL: %s \n' % obj.absolute_url()) + logger.info("Writing %s to CARIBBEAN" % obj.absolute_url()) + with open("/".join(["/tmp/", "caribbean"]), "a") as f: + f.writelines("Object URL: %s \n" % obj.absolute_url()) def write_se_europe(self, obj): - logger.info('Writing %s to SE EUROPE' % obj.absolute_url()) - with open('/'.join(['/tmp/', 'se-europe']), 'a') as f: - f.writelines('Object URL: %s \n' % obj.absolute_url()) + logger.info("Writing %s to SE EUROPE" % obj.absolute_url()) + with open("/".join(["/tmp/", "se-europe"]), "a") as f: + f.writelines("Object URL: %s \n" % obj.absolute_url()) def catalog_search(self): catalog = self.context.portal_catalog query = { - 'portal_type': [ - 'eea.climateadapt.aceproject', - 'eea.climateadapt.adaptationoption', - 'eea.climateadapt.casestudy', - 'eea.climateadapt.guidancedocument', - 'eea.climateadapt.indicator', - 'eea.climateadapt.informationportal', - 'eea.climateadapt.mapgraphdataset', - 'eea.climateadapt.organisation', - 'eea.climateadapt.publicationreport', - 'eea.climateadapt.researchproject', - 'eea.climateadapt.tool', + "portal_type": [ + "eea.climateadapt.aceproject", + "eea.climateadapt.adaptationoption", + "eea.climateadapt.casestudy", + "eea.climateadapt.guidancedocument", + "eea.climateadapt.indicator", + "eea.climateadapt.informationportal", + "eea.climateadapt.mapgraphdataset", + "eea.climateadapt.organisation", + "eea.climateadapt.publicationreport", + "eea.climateadapt.researchproject", + "eea.climateadapt.tool", ] } brains = catalog.searchResults(**query) @@ -585,11 +476,10 @@ def catalog_search(self): def _archive_news(site): - """ Script that will get called by cron once per day - """ - catalog = getToolByName(site, 'portal_catalog') - query = {'portal_type': ['News Item', 'Link', 'Event'], - 'review_state': 'published'} + """Script that will get called by cron once per day""" + catalog = getToolByName(site, "portal_catalog") + query = {"portal_type": ["News Item", "Link", + "Event"], "review_state": "published"} brains = catalog.searchResults(**query) for b in brains: @@ -597,417 +487,187 @@ def _archive_news(site): # if isExpired(obj) == 1 and api.content.get_state(obj) != 'archived': if isExpired(obj) == 1: - logger.info('Archiving %s' % obj.absolute_url()) - api.content.transition(obj, 'archive') + logger.info("Archiving %s" % obj.absolute_url()) + api.content.transition(obj, "archive") transaction.commit() -def convert_to_string(item): - """ Convert to string other types - """ - - if not item: - return '' - - if not isinstance(item, basestring): - new_item = "" - try: - iterator = iter(item) - except TypeError, err: - value = getattr(item, 'raw', None) - - if value: - return value - logger.error(err) - - return '' - else: - for i in iterator: - new_item += i - - return new_item - - return item - - -def discover_links(string_to_search): - """ Use regular expressions to get all urls in string - """ - # REGEX = re.compile(ur'(?i)\b((?:https?://|www\d{0,3}[.]|[a-z0-9.\-]+[.] 
- # [a-z]{2,4}/)(?:[^\s()<>]|\(([^\s()<>]+|(\([^\s()<>]+\)))*\))+(?:\(([^\s()<> - # ]+|(\([^\s()<>]+\)))*\)|[^\s`!()\[\]{};:\'\".,<>?\xab\xbb\u201c\u201d\u2018 - # \u2019]))') - REGEX = re.compile( - 'http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+') - - try: - result = re.findall(REGEX, string_to_search) or [] - - if isinstance(result, basestring): - result = [result] - except Exception, err: - logger.error(err) - result = [] - - return result - - -def compute_broken_links(site): - """ Script that will get called by cron once per day - """ - - results = [] - annot = IAnnotations(site)['broken_links_data'] - now = DateTime() - links = get_links(site) - - if isinstance(annot, list): - # store old data - old_data = annot - annot = PersistentMapping() - IAnnotations(site)['broken_links_data'] = annot - annot['pre_nov7_data'] = old_data - - for info in links: - res = check_link(info['link']) - if res is not None: - res['object_url'] = info['object_url'] - results.append(res) - - annot[now] = results - dates = annot.keys() - - if len(dates) >= 5: # maximum no. of dates stored - # delete oldest data except 'pre_nov7_data' - del annot[sorted(dates)[0]] - - IAnnotations(site)._p_changed = True - transaction.commit() - - -def get_links(site): - """ Gets the links for all our items by using the websites field - along with the respective object urls - """ - - catalog = getToolByName(site, 'portal_catalog') - query = { - 'portal_type': [ - 'eea.climateadapt.aceproject', - 'eea.climateadapt.adaptationoption', - 'eea.climateadapt.casestudy', - 'eea.climateadapt.guidancedocument', - 'eea.climateadapt.indicator', - 'eea.climateadapt.informationportal', - 'eea.climateadapt.mapgraphdataset', - 'eea.climateadapt.organisation', - 'eea.climateadapt.publicationreport', - 'eea.climateadapt.researchproject', - 'eea.climateadapt.tool', - 'collective.cover.content', - ] - } - brains = catalog.searchResults(**query) - urls = [] - - def append_urls(link, path): return urls.append({ - 'link': link, - 'object_url': path - }) - count = 0 - logger.info('Got %s objects' % len(brains)) - - for b in brains: - obj = b.getObject() - path = obj.getPhysicalPath() - - if 'en' not in path: - continue - - if hasattr(obj, 'websites'): - if isinstance(obj.websites, str): - append_urls(obj.websites, path) - elif type(obj.websites) is list or type(obj.websites) is tuple: - for url in obj.websites: - append_urls(url, path) - attrs = ['long_description', 'description', 'source', 'comments'] - - for attr in attrs: - string_to_search = convert_to_string(getattr(obj, attr, '')) - - if len(string_to_search) > 0: - if attr == 'long_description': - bs = BeautifulSoup(string_to_search) - links = bs.findAll( - 'a', attrs={'href': re.compile("^https?://")} - ) - - for link in links: - append_urls(link.get('href'), path) - else: - links = discover_links(string_to_search) - - # get rid of duplicates - links = list(set(links)) - - for link in links: - append_urls(link, path) - - if obj.portal_type == 'collective.cover.content': - for tile in obj.list_tiles(): - if 'richtext' in obj.get_tile_type(tile): - richtext = obj.get_tile(tile).getText() - bs = BeautifulSoup(richtext) - links = bs.findAll( - 'a', attrs={'href': re.compile("^https?://")} - ) - - for link in links: - append_urls(link.get('href'), path) - - count += 1 - - if count % 100 == 0: - logger.info('Finished going through %s objects' % count) - - logger.info("Finished getting links.") - - return urls - - -def check_link(link): - """ Check the links and 
return only the broken ones with the respective - status codes - """ - - if link: - if isinstance(link, unicode): - try: - link = link.encode() - except UnicodeEncodeError: - logger.info('UnicodeEncodeError on link %s', link) - - return {'status': 504, 'url': link} - - try: - if link[0:7].find('http') == -1: - link = 'http://' + link - except Exception, err: - logger.error(err) - - logger.warning("Now checking: %s", link) - - try: - resp = requests.head(link, timeout=5, allow_redirects=True) - if resp.status_code == 404: - return {'status': '404', 'url': link} - # requests.head(link, timeout=5, allow_redirects=True) - except requests.exceptions.ReadTimeout: - return {'status': '504', 'url': link} - except requests.exceptions.ConnectTimeout: - logger.info("Timed out.") - logger.info("Trying again with link: %s", link) - try: - requests.head(link, timeout=30, allow_redirects=True) - except: - return {'status': '504', 'url': link} - except requests.exceptions.TooManyRedirects: - logger.info("Redirected.") - logger.info("Trying again with link: %s", link) - try: - requests.head(link, timeout=30, allow_redirects=True) - except: - return {'status': '301', 'url': link} - except requests.exceptions.URLRequired: - return {'status': '400', 'url': link} - except requests.exceptions.ProxyError: - return {'status': '305', 'url': link} - except requests.exceptions.HTTPError: - return {'status': '505', 'url': link} - except: - return {'status': '404', 'url': link} - - return - - -class IContactForm(form.Schema): - name = schema.TextLine(title=u"Name:", required=True) - email = Email(title=u"Email:", required=True) - feedback = schema.Choice(title=u"Type of feedback:", required=True, - values=[ - "Request for information", - "Suggestion for Improvement", - "Broken link", - ]) - message = schema.Text(title=u"Message:", required=True) - - captcha = schema.TextLine( - title=u"Captcha", - description=u"", - required=False - ) - - -class ContactForm(form.SchemaForm): - """ Contact Form - """ - - schema = IContactForm - ignoreContext = True - - label = u"Contact CLIMATE-ADAPT" - description = u""" Please use the contact form below if you have questions - on CLIMATE-ADAPT, to suggest improvements for CLIMATE-ADAPT or to report - broken links. - """ - - fields = field.Fields(IContactForm) - fields['captcha'].widgetFactory = CaptchaFieldWidget - - @button.buttonAndHandler(u"Submit") - def handleApply(self, action): - data, errors = self.extractData() - - if errors: - self.status = self.formErrorsMessage - - return - - if 'captcha' in data: - # Verify the user input against the captcha - captcha = CaptchaValidator(self.context, self.request, None, - IContactForm['captcha'], None) - - try: - valid = captcha.validate(data['captcha']) - except WrongCaptchaCode: - show_message(message=u"Invalid Captcha.", - request=self.request, type='error') - return - - if valid: - mail_host = api.portal.get_tool(name='MailHost') - # emailto = str(api.portal.getSite().email_from_address) - - mime_msg = MIMEText(data.get('message')) - mime_msg['Subject'] = data.get('feedback') - mime_msg['From'] = data.get('email') - # mime_msg['To'] = ','.join(b for b in CONTACT_MAIL_LIST) - # mime_msg['To'] = CONTACT_MAIL_LIST - - for m in CONTACT_MAIL_LIST: - mime_msg['To'] = m - - self.description = u"Email Sent." - IStatusMessage(self.request).addStatusMessage( - "Email SENT", - 'info') - return mail_host.send(mime_msg.as_string()) - else: - self.description = u"Please complete the Captcha." 
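Aside on the recipient handling in the removed handler above: with the stdlib email package, assigning mime_msg["To"] once per address in CONTACT_MAIL_LIST appends a separate To: header for each recipient rather than overwriting the previous one. A minimal standalone sketch of both variants, using hypothetical addresses that are not taken from the patch:

from email.mime.text import MIMEText

CONTACT_MAIL_LIST = ["first@example.org", "second@example.org"]  # hypothetical addresses

looped = MIMEText("feedback body")
looped["Subject"] = "Broken link"
looped["From"] = "visitor@example.org"
for address in CONTACT_MAIL_LIST:
    # Message.__setitem__ appends, so this yields one "To:" header per address.
    looped["To"] = address

joined = MIMEText("feedback body")
joined["Subject"] = "Broken link"
joined["From"] = "visitor@example.org"
# Single comma-separated header, which is what most mail agents expect.
joined["To"] = ", ".join(CONTACT_MAIL_LIST)

print(looped.as_string())
print(joined.as_string())

RFC 5322 allows at most one To: field per message, so the comma-joined form is generally the safer choice when handing the serialized message to MailHost.send.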
- - -class IContactFooterForm(form.Schema): - - name = schema.TextLine(title=u"Name:", required=True) - email = Email(title=u"Your Email:", required=True) - subject = schema.TextLine(title=u"Subject", required=True) - message = schema.Text(title=u"Message:", required=True) - - captcha = schema.TextLine( - title=u"Captcha", - description=u"", - required=False - ) - - -class ContactFooterForm(form.SchemaForm): - """ Footer Contact Form - """ - - schema = IContactFooterForm - ignoreContext = True - - label = u"Contact form" - description = u""" Climate-ADAPT aims to support Europe in adapting to - climate change. It is an initiative of the European Commission and helps - users to access and share data and information on expected climate change - in Europe. Fill in this form to contact the site owners. - """ - - fields = field.Fields(IContactFooterForm) - fields['captcha'].widgetFactory = CaptchaFieldWidget - - @button.buttonAndHandler(u"Submit") - def handleApply(self, action): - data, errors = self.extractData() - - if errors: - self.status = self.formErrorsMessage - - return - - if 'captcha' in data: - # Verify the user input against the captcha - captcha = CaptchaValidator(self.context, - self.request, None, - IContactFooterForm['captcha'], None) - - try: - valid = captcha.validate(data['captcha']) - except WrongCaptchaCode: - show_message(message=u"Invalid Captcha.", - request=self.request, type='error') - return - - if valid: - mail_host = api.portal.get_tool(name='MailHost') - - info = {'name': data.get('name'), - 'mail': data.get('email'), - 'url': self.context.absolute_url()} - text = """ - -Climate Adapt Website - -You are receiving this mail because %(name)s -%(mail)s -is sending feedback about the site you administer at %(url)s. -""" % info - - mime_msg = MIMEText(data.get('message') + text) - mime_msg['Subject'] = data.get('subject') - mime_msg['From'] = data.get('email') - mime_msg['To'] = str(api.portal.getSite().email_from_address) - - self.description = u"Email Sent." - - IStatusMessage(self.request).addStatusMessage( - "Email SENT", - 'info') - - return mail_host.send(mime_msg.as_string()) - else: - self.description = u"Please complete the Captcha." - - -CaptchaForm = wrap_form(ContactForm) - -# Register Captcha validator for the captcha field in the IContactForm -validator.WidgetValidatorDiscriminators( - CaptchaValidator, field=IContactForm['captcha']) - - -CaptchaFooterForm = wrap_form(ContactFooterForm) - -# Register Captcha validator for the captcha field in the IContactForm -validator.WidgetValidatorDiscriminators( - CaptchaValidator, field=IContactFooterForm['captcha']) +# class IContactForm(form.Schema): +# name = schema.TextLine(title="Name:", required=True) +# email = Email(title="Email:", required=True) +# feedback = schema.Choice( +# title="Type of feedback:", +# required=True, +# values=[ +# "Request for information", +# "Suggestion for Improvement", +# "Broken link", +# ], +# ) +# message = schema.Text(title="Message:", required=True) +# +# captcha = schema.TextLine(title="Captcha", description="", required=False) + + +# class ContactForm(form.SchemaForm): +# """Contact Form""" +# +# schema = IContactForm +# ignoreContext = True +# +# label = "Contact CLIMATE-ADAPT" +# description = """ Please use the contact form below if you have questions +# on CLIMATE-ADAPT, to suggest improvements for CLIMATE-ADAPT or to report +# broken links. 
+# """ +# +# fields = field.Fields(IContactForm) +# fields["captcha"].widgetFactory = CaptchaFieldWidget +# +# @button.buttonAndHandler("Submit") +# def handleApply(self, action): +# data, errors = self.extractData() +# +# if errors: +# self.status = self.formErrorsMessage +# +# return +# +# if "captcha" in data: +# # Verify the user input against the captcha +# captcha = CaptchaValidator( +# self.context, self.request, None, IContactForm["captcha"], None +# ) +# +# try: +# valid = captcha.validate(data["captcha"]) +# except WrongCaptchaCode: +# show_message( +# message="Invalid Captcha.", request=self.request, type="error" +# ) +# return +# +# if valid: +# mail_host = api.portal.get_tool(name="MailHost") +# # emailto = str(api.portal.getSite().email_from_address) +# +# mime_msg = MIMEText(data.get("message")) +# mime_msg["Subject"] = data.get("feedback") +# mime_msg["From"] = data.get("email") +# # mime_msg['To'] = ','.join(b for b in CONTACT_MAIL_LIST) +# # mime_msg['To'] = CONTACT_MAIL_LIST +# +# for m in CONTACT_MAIL_LIST: +# mime_msg["To"] = m +# +# self.description = "Email Sent." +# IStatusMessage(self.request).addStatusMessage( +# "Email SENT", "info") +# return mail_host.send(mime_msg.as_string()) +# else: +# self.description = "Please complete the Captcha." + + +# class IContactFooterForm(form.Schema): +# name = schema.TextLine(title="Name:", required=True) +# email = Email(title="Your Email:", required=True) +# subject = schema.TextLine(title="Subject", required=True) +# message = schema.Text(title="Message:", required=True) +# +# captcha = schema.TextLine(title="Captcha", description="", required=False) + + +# class ContactFooterForm(form.SchemaForm): +# """Footer Contact Form""" +# +# schema = IContactFooterForm +# ignoreContext = True +# +# label = "Contact form" +# description = """ Climate-ADAPT aims to support Europe in adapting to +# climate change. It is an initiative of the European Commission and helps +# users to access and share data and information on expected climate change +# in Europe. Fill in this form to contact the site owners. +# """ +# +# fields = field.Fields(IContactFooterForm) +# fields["captcha"].widgetFactory = CaptchaFieldWidget +# +# @button.buttonAndHandler("Submit") +# def handleApply(self, action): +# data, errors = self.extractData() +# +# if errors: +# self.status = self.formErrorsMessage +# +# return +# +# if "captcha" in data: +# # Verify the user input against the captcha +# captcha = CaptchaValidator( +# self.context, self.request, None, IContactFooterForm["captcha"], None +# ) +# +# try: +# valid = captcha.validate(data["captcha"]) +# except WrongCaptchaCode: +# show_message( +# message="Invalid Captcha.", request=self.request, type="error" +# ) +# return +# +# if valid: +# mail_host = api.portal.get_tool(name="MailHost") +# +# info = { +# "name": data.get("name"), +# "mail": data.get("email"), +# "url": self.context.absolute_url(), +# } +# text = ( +# """ +# +# Climate Adapt Website +# +# You are receiving this mail because %(name)s +# %(mail)s +# is sending feedback about the site you administer at %(url)s. +# """ +# % info +# ) +# +# mime_msg = MIMEText(data.get("message") + text) +# mime_msg["Subject"] = data.get("subject") +# mime_msg["From"] = data.get("email") +# mime_msg["To"] = str(api.portal.getSite().email_from_address) +# +# self.description = "Email Sent." 
+# +# IStatusMessage(self.request).addStatusMessage("Email SENT", "info") +# +# return mail_host.send(mime_msg.as_string()) +# else: +# self.description = "Please complete the Captcha." + + +# CaptchaForm = wrap_form(ContactForm) +# +# # Register Captcha validator for the captcha field in the IContactForm +# validator.WidgetValidatorDiscriminators( +# CaptchaValidator, field=IContactForm["captcha"]) +# +# +# CaptchaFooterForm = wrap_form(ContactFooterForm) +# +# # Register Captcha validator for the captcha field in the IContactForm +# validator.WidgetValidatorDiscriminators( +# CaptchaValidator, field=IContactFooterForm["captcha"] +# ) def preventFolderDeletionEvent(object, event): for obj in object.listFolderContents(): - iterate_control = obj.restrictedTraverse('@@iterate_control') + iterate_control = obj.restrictedTraverse("@@iterate_control") if iterate_control.is_checkout(): # Cancel deletion @@ -1015,13 +675,11 @@ def preventFolderDeletionEvent(object, event): class ViewGoogleAnalyticsReport(BrowserView): - """ A view to view the google analytics report data - """ + """A view to view the google analytics report data""" def report_data(self): - site = portal.get() - report = site.__annotations__.get('google-analytics-cache-data', {}) + report = site.__annotations__.get("google-analytics-cache-data", {}) reports = reversed(sorted(report.items(), key=lambda x: int(x[1]))) @@ -1029,95 +687,96 @@ def report_data(self): class DatetimeDataConverter(BaseConverter): - """ Avoid problem with missing tzinfo from default datetime widgets - """ + """Avoid problem with missing tzinfo from default datetime widgets""" def toFieldValue(self, value): - logger.warn('dateconvertwidget', value) + logger.warn("dateconvertwidget", value) value = super(DatetimeDataConverter, self).toFieldValue(value) if value is not self.field.missing_value: - if not getattr(value, 'tzinfo', None): + if not getattr(value, "tzinfo", None): value = value.replace(tzinfo=gettz()) return value class VibrioProxy(BrowserView): - url_vibrio = "https://geoportal.ecdc.europa.eu/vibriomapviewer/api/proxy" def __call__(self): response = self.request.response response.setHeader("Content-type", "application/xml") - url = self.url_vibrio + '?' + self.request["QUERY_STRING"] + url = self.url_vibrio + "?" + self.request["QUERY_STRING"] resp = requests.get(url) return resp.content -class GetCoventantOfMayorsLinks(BrowserView): - domains = ['www.covenantofmayors.eu', 'eumayors.eu', 'mayors-adapt.eu'] - - def url_needed(self, url): - for domain in self.domains: - if domain in url: - return True - - return False - - def data_to_xls(self, data): - headers = ['Location', 'Link'] - - # Create a workbook and add a worksheet. 
- out = BytesIO() - workbook = xlsxwriter.Workbook(out, {'in_memory': True}) - - wtitle = 'Broken-Links' - worksheet = workbook.add_worksheet(wtitle[:30]) - - for i, title in enumerate(headers): - worksheet.write(0, i, title or '') - - row_index = 1 - - for row in data: - path = row[0] - link = row[1] - worksheet.write(row_index, 0, path or '') - worksheet.write(row_index, 1, link or '') - - row_index += 1 - - workbook.close() - out.seek(0) - - return out - - def __call__(self): - links = get_links(self.context) - result = [] - - for link in links: - url = link['link'] - - if url and self.url_needed(url): - path = '/'.join(link['object_url']) - obj = self.context.unrestrictedTraverse(path) - result.append((obj.absolute_url(), url)) - - xlsio = self.data_to_xls(result) - sh = self.request.response.setHeader - - sh('Content-Type', 'application/vnd.openxmlformats-officedocument.' - 'spreadsheetml.sheet') - fname = "-".join(["CovenantOfMayorsLinks", - str(datetime.now().replace(microsecond=0))]) - sh('Content-Disposition', - 'attachment; filename=%s.xlsx' % fname) - - return xlsio.read() - - -def create_contributions_link(language='en', organisation_id=None): +# class GetCoventantOfMayorsLinks(BrowserView): +# domains = ["www.covenantofmayors.eu", "eumayors.eu", "mayors-adapt.eu"] +# +# def url_needed(self, url): +# for domain in self.domains: +# if domain in url: +# return True +# +# return False +# +# def data_to_xls(self, data): +# headers = ["Location", "Link"] +# +# # Create a workbook and add a worksheet. +# out = BytesIO() +# workbook = xlsxwriter.Workbook(out, {"in_memory": True}) +# +# wtitle = "Broken-Links" +# worksheet = workbook.add_worksheet(wtitle[:30]) +# +# for i, title in enumerate(headers): +# worksheet.write(0, i, title or "") +# +# row_index = 1 +# +# for row in data: +# path = row[0] +# link = row[1] +# worksheet.write(row_index, 0, path or "") +# worksheet.write(row_index, 1, link or "") +# +# row_index += 1 +# +# workbook.close() +# out.seek(0) +# +# return out +# +# def __call__(self): +# links = get_links(self.context) +# result = [] +# +# for link in links: +# url = link["link"] +# +# if url and self.url_needed(url): +# path = "/".join(link["object_url"]) +# obj = self.context.unrestrictedTraverse(path) +# result.append((obj.absolute_url(), url)) +# +# xlsio = self.data_to_xls(result) +# sh = self.request.response.setHeader +# +# sh( +# "Content-Type", +# "application/vnd.openxmlformats-officedocument." "spreadsheetml.sheet", +# ) +# fname = "-".join( +# ["CovenantOfMayorsLinks", str( +# datetime.now().replace(microsecond=0))] +# ) +# sh("Content-Disposition", "attachment; filename=%s.xlsx" % fname) +# +# return xlsio.read() + + +def create_contributions_link(language="en", organisation_id=None): # origin_website vocabulary? # https://github.com/eea/eea.climateadapt.plone/blob/master/eea/climateadapt/vocabulary.py#L441 @@ -1145,8 +804,8 @@ def create_contributions_link(language='en', organisation_id=None): if organisation_id in map_contributor_values: org = map_contributor_values[organisation_id] - terms.append(('cca_origin_websites.keyword', [org])) - terms.append(('language', [language])) + terms.append(("cca_origin_websites.keyword", [org])) + terms.append(("language", [language])) url = "/" + language + "/observatory/catalogue/?" 
query = filters_to_query(terms) diff --git a/eea/climateadapt/browser/scripts.py b/eea/climateadapt/browser/scripts.py index e51b2bf3d..518a7c26a 100644 --- a/eea/climateadapt/browser/scripts.py +++ b/eea/climateadapt/browser/scripts.py @@ -1,9 +1,15 @@ +# TODO: this file location is not ideal +from eea.climateadapt.broken_links import compute_broken_links +from eea.climateadapt.browser.external_links import ( + AdapteCCACaseStudyImporter, + DRMKCImporter, +) +from eea.climateadapt.browser.misc import _archive_news from eea.climateadapt.scripts import get_plone_site -from eea.climateadapt.browser.misc import _archive_news, compute_broken_links -from eea.climateadapt.browser.external_links import AdapteCCACaseStudyImporter, DRMKCImporter + def import_drmkc(): - """ A cron callable script to get DRMKC projects + """A cron callable script to get DRMKC projects This should be run through the zope client script running machinery,: @@ -16,7 +22,7 @@ def import_drmkc(): def sync_adaptecca_casestudies(): - """ A cron callable script to get AdapteCCA case studies + """A cron callable script to get AdapteCCA case studies This should be run through the zope client script running machinery,: @@ -28,7 +34,7 @@ def sync_adaptecca_casestudies(): def get_broken_links(): - """ A cron callable script to get data regarding broken links + """A cron callable script to get data regarding broken links This should be run through the zope client script running machinery,: @@ -39,7 +45,7 @@ def get_broken_links(): def archive_news(): - """ A cron callable script which archives news automatically + """A cron callable script which archives news automatically This should be run through the zope client script running machinery: diff --git a/eea/climateadapt/configure.zcml b/eea/climateadapt/configure.zcml index c02bb0749..8d9f6cc34 100644 --- a/eea/climateadapt/configure.zcml +++ b/eea/climateadapt/configure.zcml @@ -1,21 +1,21 @@ + xmlns="http://namespaces.zope.org/zope" + xmlns:zcml="http://namespaces.zope.org/zcml" + xmlns:browser="http://namespaces.zope.org/browser" + xmlns:five="http://namespaces.zope.org/five" + xmlns:genericsetup="http://namespaces.zope.org/genericsetup" + xmlns:grok="http://namespaces.zope.org/grok" + xmlns:i18n="http://namespaces.zope.org/i18n" + xmlns:monkey="http://namespaces.plone.org/monkey" + xmlns:plone="http://namespaces.plone.org/plone" + xmlns:z3c="http://namespaces.zope.org/z3c" + i18n_domain="eea.climateadapt" +> + package="plone.restapi" + file="permissions.zcml" + /> + /> + package="plone.resource" + file="meta.zcml" + /> + package="z3c.jbot" + file="meta.zcml" + /> - + - + - + - + - + - + + + directory="browser/pt-overrides" + layer="eea.climateadapt.interfaces.IEEAClimateAdaptInstalled" + /> - + - + - + - + - + - + - + - -------[ Marker Interface for News/Events/Links ]------- + - + - + - + - -------[ Catalog indexes adapters ]------- + + factory=".catalog.aceitem_id" + name="aceitem_id" + /> + factory=".catalog.acemeasure_id" + name="acemeasure_id" + /> + factory=".catalog.aceproject_id" + name="aceproject_id" + /> + factory=".catalog.countries" + name="countries" + /> + factory=".catalog.imported_ids" + name="imported_ids" + /> + factory=".catalog.search_type" + name="search_type" + /> + factory=".catalog.search_type_for_newsevents" + name="search_type" + /> + factory=".catalog.featured" + name="featured" + /> + factory=".catalog.get_aceitem_description" + name="Description" + /> + factory=".catalog.get_aceitem_description_indicator" + name="Description" + /> + 
factory=".catalog.get_aceproject_description" + name="Description" + /> + factory=".catalog.get_adaptation_option_description" + name="Description" + /> + factory=".catalog.get_casestudy_description" + name="Description" + /> + factory=".catalog.cover_description" + name="Description" + /> + factory=".catalog.image_field_indexer" + name="image_field" + /> + factory=".catalog.bio_regions" + name="bio_regions" + /> - - - ----[ Vocabularies: ]------ + factory=".catalog.macro_regions" + name="macro_regions" + /> + ----[ Vocabularies: ]------ - + name="eea.climateadapt.mission.budget_range" + component=".vocabulary.budget_ranges_vocabulary" + /> + name="eea.climateadapt.mission.type_of_funding" + component=".vocabulary.type_of_funding_vocabulary" + /> + name="eea.climateadapt.aceitems_storagetypes" + component=".vocabulary.aceitem_storagetypes_vocabulary" + /> - + name="eea.climateadapt.aceitems_sectors" + component=".vocabulary.aceitem_sectors_vocabulary" + /> + name="eea.climateadapt.aceitems_elements" + component=".vocabulary.aceitem_elements_vocabulary" + /> + name="eea.climateadapt.aceitems_elements_case_study" + component=".vocabulary.aceitem_elements_case_study_vocabulary" + /> + name="eea.climateadapt.aceitems_climateimpacts" + component=".vocabulary.aceitem_climateimpacts_vocabulary" + /> + name="eea.climateadapt.aceitems_featured" + component=".vocabulary.aceitem_featured_vocabulary" + /> + name="eea.climateadapt.aceitems_relevance" + component=".vocabulary.aceitem_relevance_vocabulary" + /> + name="eea.climateadapt.aceitems_governancelevel" + component=".vocabulary.governance_level" + /> + name="eea.climateadapt.event_language" + component=".vocabulary.language" + /> + name="eea.climateadapt.aceitems_category" + component=".vocabulary.category" + /> + name="eea.climateadapt.aceitems_ipcc_category" + component=".vocabulary.ipcc_category" + /> + name="eea.climateadapt.aceitems_key_type_measures" + component=".vocabulary.key_type_measures" + /> + name="eea.climateadapt.aceitems_key_type_measures_short" + component=".vocabulary.key_type_measures_short" + /> + name="eea.climateadapt.acemeasure_implementationtype" + component=".vocabulary.acemeasure_implementationtype_vocabulary" + /> + name="eea.climateadapt.ace_countries" + component=".vocabulary.ace_countries_vocabulary" + /> + name="eea.climateadapt.acemeasure_types" + component=".vocabulary.acemeasure_types" + /> + name="eea.climateadapt.origin_website" + component=".vocabulary.origin_website" + /> + name="eea.climateadapt.health_impacts" + component=".vocabulary.health_impacts" + /> + name="eea.climateadapt.funding_programme" + component=".vocabulary.funding_programme" + /> + name="eea.climateadapt.key_community_systems" + component=".vocabulary.key_community_systems" + /> + name="eea.climateadapt.climate_threats" + component=".vocabulary.climate_threats" + /> + name="eea.climateadapt.cca_types" + component=".vocabulary.cca_types" + /> + name="eea.climateadapt.special_tags" + component=".vocabulary.SpecialTagsVocabularyFactory" + /> + name="eea.climateadapt.keywords" + component=".vocabulary.KeywordsVocabularyFactory" + /> + name="eea.climateadapt.object_provides" + component=".vocabulary.ObjectProvidesVocabulary" + /> + name="eea.climateadapt.faceted_elements_voc" + component=".vocabulary.faceted_elements" + /> + factory=".vocabulary.AdaptationOptionsVocabulary" + name="eea.climateadapt.adaptation_options" + /> + factory=".vocabulary.CaseStudiesVocabulary" + name="eea.climateadapt.case_studies" + /> + 
factory=".vocabulary.OrganisationsVocabulary" + name="eea.climateadapt.organisations" + /> + name="eea.climateadapt.faceted_countries" + component=".vocabulary.faceted_countries_vocabulary" + /> + factory=".vocabulary.CCAItemsVocabulary" + name="eea.climateadapt.cca_items" + /> + name="eea.climateadapt.rich_header_level" + component=".vocabulary.rich_header_level" + /> + name="eea.climateadapt.rast_steps" + component=".vocabulary.rast_steps_vocabulary" + /> + name="eea.climateadapt.eligible_entities" + component=".vocabulary.eligible_entities_vocabulary" + /> + name="eea.climateadapt.readiness_for_use" + component=".vocabulary.readiness_for_use_vocabulary" + /> + name="eea.climateadapt.geographical_scale" + component=".vocabulary.geographical_scale_vocabulary" + /> + name="eea.climateadapt.tool_language" + component=".vocabulary.tool_language_vocabulary" + /> + name="eea.climateadapt.most_useful_for" + component=".vocabulary.most_useful_for_vocabulary" + /> + name="eea.climateadapt.user_requirements" + component=".vocabulary.user_requirements_vocabulary" + /> - ------[ Marker interfaces: ]-------- - - - - - - - - + + + + + + + + + - ------[ Widgets: ]-------- + - + - ------[ Subscribers: ]-------- + + handler="eea.facetednavigation.caching.cache.invalidateFacetedCache" + /> + handler="eea.facetednavigation.caching.cache.invalidateFacetedCache" + /> + handler=".events.invalidate_cache_faceted_object_row" + /> + handler=".events.invalidate_cache_faceted_object_row" + /> + handler=".acemeasure.handle_measure_added" + /> - + factory=".marshaller.GeoCharsFieldModifier" + provides="eea.rdfmarshaller.interfaces.ISurfResourceModifier" + /> + factory=".marshaller.CountryModifier" + provides="eea.rdfmarshaller.interfaces.ISurfResourceModifier" + /> + factory=".marshaller.ContributorModifier" + provides="eea.rdfmarshaller.interfaces.ISurfResourceModifier" + /> + factory=".marshaller.IssuedFieldModifier" + provides="eea.rdfmarshaller.interfaces.ISurfResourceModifier" + /> - + - + + factory=".marshaller.CountryTitle2Surf" + name="title" + /> - ------[ Workflow Message Functionality: ]-------- + + for=".workflow.IWorkflowMessageEvent" + handler=".events.trigger_contentrules" + /> + interface=".workflow.IWorkflowMessageEvent" + type="plone.contentrules.rule.interfaces.IRuleEventType" + name="ClimateAdapt: workflow transition with message" + /> + factory=".workflow.workflow_message" + provides="plone.stringinterp.interfaces.IStringSubstitution" + for="*" + name="cca_workflow_message" + /> + name="set_workflow_message" + for="Products.CMFCore.interfaces.IContentish" + class=".workflow.WorkflowTransitionMessage" + permission="cmf.ReviewPortalContent" + /> - ------[ Workflow transitions: ]-------- + + handler=".events.handle_workflow_change" + /> - ------[ Indicator Message Functionality: ]-------- + + for=".indicator.IIndicatorMessageEvent" + handler=".events.trigger_indicator_contentrule" + /> + interface=".indicator.IIndicatorMessageEvent" + type="plone.contentrules.rule.interfaces.IRuleEventType" + name="Indicator modified" + /> + factory=".indicator.indicator_message" + provides="plone.stringinterp.interfaces.IStringSubstitution" + for="*" + name="cca_indicator_message" + /> + handler=".events.handle_iterate_wc_deletion" + /> + factory=".traversal.AcquisitionNamespace" + name="aq" + /> - - ------[ Monkey Patched ]-------- - - - - - - + handler=".events.remove_broken_relations" + /> - - - - - - - - - + diff --git a/eea/climateadapt/patches.zcml b/eea/climateadapt/patches.zcml new file mode 100644 index 
000000000..b454fae35 --- /dev/null +++ b/eea/climateadapt/patches.zcml @@ -0,0 +1,69 @@
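For context on how the relocated pieces fit together at runtime: the cron wrapper in scripts.py fetches the portal and calls compute_broken_links, which gathers candidate URLs via get_links, probes each one with check_link, and stores every run under the broken_links_data annotation keyed by date. Below is a minimal sketch of running the job and reading the stored results back, assuming the relocated compute_broken_links keeps the annotation layout of the version removed from misc.py above; the reporting loop is illustrative and not part of the patch.

from zope.annotation.interfaces import IAnnotations

from eea.climateadapt.broken_links import compute_broken_links
from eea.climateadapt.scripts import get_plone_site


def report_broken_links():
    # Mirrors the cron wrappers in scripts.py: fetch the portal, then let
    # the checker fill the annotation storage.
    site = get_plone_site()
    compute_broken_links(site)

    # Each run is keyed by a DateTime and holds a list of dicts with
    # "status", "url" and "object_url" (a physical path tuple).
    data = IAnnotations(site).get("broken_links_data", {})
    for run_date, entries in data.items():
        if run_date == "pre_nov7_data":  # legacy bucket kept by the migration
            continue
        for entry in entries:
            print("%s %s (on %s)" % (
                entry["status"], entry["url"], "/".join(entry["object_url"])))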