From a7a060bbbc04966ac29e96f7a24312bf98c80af2 Mon Sep 17 00:00:00 2001
From: Chris Hambridge <chambrid@redhat.com>
Date: Tue, 1 Sep 2020 14:07:12 -0400
Subject: [PATCH 1/2] Use extra_vars and CostManagement CR to drive data
 collection.

---
 .gitignore                                |   5 +-
 molecule/test-local/converge.yml          |  52 +---
 roles/collect/tasks/main.yml              |   8 +
 roles/setup/defaults/main.yml             |  26 +-
 roles/setup/files/package_report.py       | 300 +++++++++++++++++++
 roles/setup/files/trusted_ca_certmap.yaml |  11 +
 roles/setup/tasks/collect.yml             | 334 ++++++++++++++++++++++
 roles/setup/tasks/main.yml                | 140 +++++++--
 roles/setup/tasks/update-status.yml       |   8 +
 watches.yaml                              |   8 +-
 10 files changed, 815 insertions(+), 77 deletions(-)
 create mode 100755 roles/setup/files/package_report.py
 create mode 100644 roles/setup/files/trusted_ca_certmap.yaml
 create mode 100644 roles/setup/tasks/collect.yml
 create mode 100644 roles/setup/tasks/update-status.yml

diff --git a/.gitignore b/.gitignore
index def13cb..77c640e 100644
--- a/.gitignore
+++ b/.gitignore
@@ -8,4 +8,7 @@ molecule/test-local/.kube/
 testing/
 
 # vscode settings
-.vscode/
\ No newline at end of file
+.vscode/
+
+# pyenv ignore
+.python-version
diff --git a/molecule/test-local/converge.yml b/molecule/test-local/converge.yml
index 698dbf4..f5209f3 100644
--- a/molecule/test-local/converge.yml
+++ b/molecule/test-local/converge.yml
@@ -201,21 +201,7 @@
           ignore_errors: yes
           failed_when: false
           command: kubectl logs
-            deployment/{{ definition.metadata.name }} -n {{ namespace }} -c operator
-          environment:
-            KUBECONFIG: '{{ lookup("env", "KUBECONFIG") }}'
-          vars:
-            definition: "{{ lookup('template',
-              '/'.join([deploy_dir, 'operator.yaml'])) | from_yaml }}"
-          register: log
-
-        - debug: var=log.stdout_lines
-
-        - name: get ansible logs
-          ignore_errors: yes
-          failed_when: false
-          command: kubectl logs
-            deployment/{{ definition.metadata.name }} -n {{ namespace }} -c ansible
+            deployment/{{ definition.metadata.name }} -n {{ namespace }}
           environment:
             KUBECONFIG: '{{ lookup("env", "KUBECONFIG") }}'
           vars:
@@ -396,21 +382,7 @@
           ignore_errors: yes
           failed_when: false
           command: kubectl logs
-            deployment/{{ definition.metadata.name }} -n {{ namespace }} -c operator
-          environment:
-            KUBECONFIG: '{{ lookup("env", "KUBECONFIG") }}'
-          vars:
-            definition: "{{ lookup('template',
-              '/'.join([deploy_dir, 'operator.yaml'])) | from_yaml }}"
-          register: log
-
-        - debug: var=log.stdout_lines
-
-        - name: get ansible logs
-          ignore_errors: yes
-          failed_when: false
-          command: kubectl logs
-            deployment/{{ definition.metadata.name }} -n {{ namespace }} -c ansible
+            deployment/{{ definition.metadata.name }} -n {{ namespace }}
           environment:
             KUBECONFIG: '{{ lookup("env", "KUBECONFIG") }}'
           vars:
@@ -420,9 +392,6 @@
 
         - debug: var=log.stdout_lines
 
-        - fail:
-            msg: "Failed on action: converge"
-
 - name: Start server on Operator container
   hosts: k8s
   tasks:
@@ -575,23 +544,6 @@
 
         - debug: var=log.stdout_lines
 
-        - name: get ansible logs
-          ignore_errors: yes
-          failed_when: false
-          command: kubectl logs
-            deployment/{{ definition.metadata.name }} -n {{ namespace }} -c  ansible
-          environment:
-            KUBECONFIG: '{{ lookup("env", "KUBECONFIG") }}'
-          vars:
-            definition: "{{ lookup('template',
-              '/'.join([deploy_dir, 'operator.yaml'])) | from_yaml }}"
-          register: log
-
-        - debug: var=log.stdout_lines
-
-        - fail:
-            msg: "Failed on action: converge"
-
 - name: Copy tar.gz to k8s from Operator
   hosts: k8s
   tasks:
diff --git a/roles/collect/tasks/main.yml b/roles/collect/tasks/main.yml
index 08ca3d5..7450e4f 100644
--- a/roles/collect/tasks/main.yml
+++ b/roles/collect/tasks/main.yml
@@ -1,5 +1,13 @@
 ---
 
+- name: print _cost_mgmt_data_openshift_io_costmanagementdata
+  debug:
+    var: _cost_mgmt_data_openshift_io_costmanagementdata
+
+- name: print meta
+  debug:
+    var: meta
+
 - name: Obtain cost-mgmt-setup info
   community.kubernetes.k8s_info:
     api_version: cost-mgmt.openshift.io/v1alpha1
diff --git a/roles/setup/defaults/main.yml b/roles/setup/defaults/main.yml
index d3fd11d..f6ca34a 100644
--- a/roles/setup/defaults/main.yml
+++ b/roles/setup/defaults/main.yml
@@ -1,8 +1,32 @@
 ---
-namespace: openshift-metering
 current_day: '{{ ansible_date_time.day | int }}'
 current_month: '{{ ansible_date_time.month | int }}'
 current_year: '{{ ansible_date_time.year | int }}'
 setup_template_path: '/tmp/cost-mgmt-operator-collect'
 setup_template_dir: "{{ lookup('password', '/dev/null chars=ascii_letters') }}"
 setup_delete_after: 'true'
+upload_cycle_seconds: 21600
+collect_data: 'false'
+collect_format: 'csv'
+collect_manifest_uuid: '{{ 99999999999999999999 | random | to_uuid }}'
+collect_archive_name: cost-mgmt
+ocp_validate_cert: 'true'
+ocp_cluster_id: ''
+reporting_operator_token_name: ''
+collect_reports:
+  - 'cm-openshift-usage-lookback-'
+  - 'cm-openshift-persistentvolumeclaim-lookback-'
+  - 'cm-openshift-node-labels-lookback-'
+collect_download_path: '/tmp/cost-mgmt-operator-collect'
+collect_delete_after: 'true'
+collect_ocp_report_timeout: 60
+collect_max_csvfile_size: 99
+api_prefix: 'https://'
+ingress_url: 'https://cloud.redhat.com/api/ingress/v1/upload'
+authentication: 'token'
+authentication_token: ''
+username: ''
+password: ''
+cacert_path: '{{ collect_download_path }}/ca-bundle.crt'
+debug: 'true'
+collect_upload_wait: '{{ 2100 | random(step=10) }}'
diff --git a/roles/setup/files/package_report.py b/roles/setup/files/package_report.py
new file mode 100755
index 0000000..6ec0d88
--- /dev/null
+++ b/roles/setup/files/package_report.py
@@ -0,0 +1,300 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+#
+# Copyright 2020 Red Hat, Inc.
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as
+# published by the Free Software Foundation, either version 3 of the
+# License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program.  If not, see <https://www.gnu.org/licenses/>.
+#
+"""Create a tarball for metering reports downloaded from an OpenShift cluster."""
+
+import argparse
+import csv
+import logging
+import json
+import os
+import sys
+import tarfile
+from datetime import datetime
+from uuid import uuid4
+
+DEFAULT_MAX_SIZE = 100
+MEGABYTE = 1024 * 1024
+
+TEMPLATE = {
+    "files": None,
+    "date": datetime.utcnow().isoformat(),
+    "uuid": None,
+    "cluster_id": None
+}
+
+
+# the csv module doesn't expose the bytes-offset of the
+# underlying file object.
+#
+# instead, the script estimates the size of the data as VARIANCE percent larger than a
+# naïve string concatenation of the CSV fields to cover the overhead of quoting
+# and delimiters. This gets close enough for now.
+VARIANCE = 0.03
+
+# Flag to use when writing to a file. Changed to "w" by the -o flag.
+FILE_FLAG = "x"
+
+# if we're creating more than 1k files, something is probably wrong.
+MAX_SPLITS = 1000
+
+# logging
+LOG = logging.getLogger(__name__)
+LOG_FORMAT = "%(asctime)s [%(levelname)s] %(message)s"
+LOG_VERBOSITY = [logging.ERROR, logging.WARNING, logging.INFO, logging.DEBUG]
+logging.basicConfig(format=LOG_FORMAT, level=logging.ERROR, stream=sys.stdout)
+
+
+def parse_args():
+    """Handle CLI arg parsing."""
+    parser = argparse.ArgumentParser(
+        description="Cost Management CSV file packaging script", prog=sys.argv[0])
+
+    # required args
+    parser.add_argument("-f", "--filepath", required=True,
+                        help="path to files to package")
+    parser.add_argument(
+        "-s",
+        "--max-size",
+        type=int,
+        default=DEFAULT_MAX_SIZE,
+        help=f"Maximum size of packages in MiB. (Default: {DEFAULT_MAX_SIZE} MiB)",
+    )
+    parser.add_argument(
+        "-o", "--overwrite", action="store_true", default=False, help="whether to overwrite existing files."
+    )
+    parser.add_argument("--ocp-cluster-id", required=True,
+                        help="OCP Cluster ID")
+    parser.add_argument("-v", "--verbosity", action="count",
+                        default=0, help="increase verbosity (up to -vvv)")
+    return parser.parse_args()
+
+
+def write_part(filename, csvreader, header, num=0, size=(DEFAULT_MAX_SIZE * MEGABYTE)):
+    """Split a part of the file into a new file.
+
+    Args:
+        filename (str) name of original file
+        csvreader (CSVReader) the csvreader object of original file
+        header (list) the CSV file's header list
+        num (int) the current split file index
+        size (int) the maximum size of the split file in bytes
+
+    Returns:
+        (str) the name of the new split file
+        (bool) whether the split reached the end of the csvreader
+
+    """
+    fname_part, ext = os.path.splitext(filename)
+    size_estimate = 0
+    split_filename = f"{fname_part}_{num}{ext}"
+    try:
+        with open(split_filename, FILE_FLAG) as split_part:
+            LOG.info(f"Writing new file: {split_filename}")
+            csvwriter = csv.writer(split_part)
+            csvwriter.writerow(header)
+            for row in csvreader:
+                csvwriter.writerow(row)
+
+                row_len = len(",".join(row))
+                size_estimate += row_len + (row_len * VARIANCE)
+
+                LOG.debug(f"file size (est): {size_estimate}")
+                if size_estimate >= size:
+                    return (split_filename, False)
+    except (IOError, FileExistsError) as exc:
+        LOG.critical(f"Fatal error: {exc}")
+        sys.exit(2)
+    return (split_filename, True)
+
+
+def need_split(filepath, max_size):
+    """Determine whether to split up the CSV files.
+
+    Args:
+        filepath (str) a directory
+        max_size (int) maximum split size in MiB
+
+    Returns:
+        True if any single file OR the total sum of files exceeds the MAX_SIZE
+        False if each single file AND the total file size is below MAX_SIZE
+
+    """
+    total_size = 0
+    max_bytes = max_size * MEGABYTE
+    for filename in os.listdir(filepath):
+        this_size = os.stat(f"{filepath}/{filename}").st_size
+        total_size += this_size
+        if this_size >= max_bytes or total_size >= max_bytes:
+            return True
+    return False
+
+
+def split_files(filepath, max_size):
+    """Split any files that exceed the file size threshold.
+
+    Args:
+        filepath (str) file path containing the CSV files
+        max_size (int) the maximum size in MiB for each file
+
+    """
+    for filename in os.listdir(filepath):
+        abspath = f"{filepath}/{filename}"
+        if os.stat(abspath).st_size >= max_size * MEGABYTE:
+            csvheader = None
+            split_files = []
+            with open(abspath, "r") as fhandle:
+                csvreader = csv.reader(fhandle)
+                csvheader = next(csvreader)
+                LOG.debug(f"Header: {csvheader}")
+
+                part = 1
+                while True:
+                    newfile, eof = write_part(
+                        abspath, csvreader, csvheader, num=part, size=(max_size * MEGABYTE))
+                    split_files.append(newfile)
+                    part += 1
+                    if eof or part >= MAX_SPLITS:
+                        break
+
+            os.remove(abspath)
+
+            # return the list of split files to stdout
+            LOG.info(f"Split files: {split_files}")
+
+
+def render_manifest(args, archivefiles=[]):
+    """Render the manifest template and write it to a file.
+
+    Args:
+        args (Namespace) an ArgumentParser Namespace object
+
+    Returns:
+        (str) the rendered manifest file name
+        (str) the manifest uuid
+    """
+    manifest = TEMPLATE
+    manifest_uuid = str(uuid4())
+    manifest["cluster_id"] = args.ocp_cluster_id
+    manifest["uuid"] = manifest_uuid
+    manifest_files = []
+    for idx in range(len(archivefiles)): 
+        upload_name = f"{manifest_uuid}_openshift_usage_report.{idx}.csv"
+        manifest_files.append(upload_name)
+    manifest["files"] = manifest_files
+    LOG.debug(f"rendered manifest: {manifest}")
+    manifest_filename = f"{args.filepath}/manifest.json"
+
+    if not os.path.exists(args.filepath):
+        os.makedirs(args.filepath)
+        LOG.info(f"Created dirs: {args.filepath}")
+
+    try:
+        with open(manifest_filename, FILE_FLAG) as mfile:
+            json.dump(manifest, mfile)
+    except FileExistsError as exc:
+        LOG.critical(f"Fatal error: {exc}")
+        sys.exit(2)
+    LOG.info(f"manifest generated")
+    return (manifest_filename, manifest_uuid)
+
+
+def write_tarball(args, tarfilename, manifest_filename, manifest_uuid, archivefiles, file_count=0):
+    """Write a tarball, adding the given files to the archive.
+
+    Args:
+        args (Namespace) an ArgumentParser Namespace object
+        tarfilename (str) the name of the tarball to create
+        manifest_filename (str) the name of the report manifest
+        manifest_uuid (str) the unique identifier of the manifest
+        archivefiles (list) the list of files to include in the archive
+        file_count (int) file number initializer
+
+    Returns:
+        (str) full filepath of the created tarball
+
+    Raises:
+        FileExistsError if tarfilename already exists
+    """
+    if not archivefiles:
+        return None
+    
+    try:
+        with tarfile.open(tarfilename, f"{FILE_FLAG}:gz") as tarball:
+            for fname in archivefiles:
+                LOG.debug(f"Adding {fname} to {tarfilename}: ")
+                if fname.endswith(".csv"):
+                    upload_name = f"{manifest_uuid}_openshift_usage_report.{file_count}.csv"
+                    tarball.add(fname, arcname=upload_name)
+                    file_count += 1
+            tarball.add(manifest_filename, arcname="manifest.json")
+    except FileExistsError as exc:
+        LOG.critical(exc)
+        sys.exit(2)
+    LOG.info(f"Wrote: {tarfilename}")
+    return f"{tarfilename}", file_count
+
+
+def build_local_csv_file_list(staging_directory):
+    """Build a list of all report csv files in staging directory."""
+    file_list = []
+    for csv_file in os.listdir(staging_directory):
+        if ".csv" in csv_file:
+            file_list.append(f"{staging_directory}/{csv_file}")
+    return file_list
+
+if "__main__" in __name__:
+    args = parse_args()
+    if args.verbosity:
+        LOG.setLevel(LOG_VERBOSITY[args.verbosity])
+    LOG.debug("CLI Args: %s", args)
+
+    if args.overwrite:
+        FILE_FLAG = "w"
+
+    out_files = []
+    need_split = need_split(args.filepath, args.max_size)
+    if need_split:
+        split_files(args.filepath, args.max_size)
+        tarpath = args.filepath + "/../"
+        tarfiletmpl = "cost-mgmt{}.tar.gz"
+
+        file_list = build_local_csv_file_list(args.filepath)
+        manifest_filename, manifest_uuid = render_manifest(args, file_list)
+        file_count = 0
+        for idx, filename in enumerate(file_list):
+            if ".csv" in filename:
+                tarfilename = os.path.abspath(
+                    tarpath + tarfiletmpl.format(idx))
+                output_tar, file_count = write_tarball(args, 
+                    tarfilename, manifest_filename, manifest_uuid, [filename], file_count)
+                if output_tar:
+                    out_files.append(output_tar)
+
+    else:
+        tarfilename = os.path.abspath(args.filepath + "/../cost-mgmt.tar.gz")
+
+        file_list = build_local_csv_file_list(args.filepath)
+        if file_list:
+            manifest_filename, manifest_uuid = render_manifest(args, file_list)
+            output_tar, _ = write_tarball(args, tarfilename, manifest_filename, manifest_uuid, file_list)
+            if output_tar:
+                out_files.append(output_tar)
+
+    for fname in out_files:
+        print(fname)
diff --git a/roles/setup/files/trusted_ca_certmap.yaml b/roles/setup/files/trusted_ca_certmap.yaml
new file mode 100644
index 0000000..233b3b0
--- /dev/null
+++ b/roles/setup/files/trusted_ca_certmap.yaml
@@ -0,0 +1,11 @@
+---
+
+apiVersion: v1
+kind: ConfigMap
+metadata:
+  namespace: openshift-metering
+  name: trusted-ca-bundle
+  annotations:
+    release.openshift.io/create-only: "true"
+  labels:
+    config.openshift.io/inject-trusted-cabundle: "true"
diff --git a/roles/setup/tasks/collect.yml b/roles/setup/tasks/collect.yml
new file mode 100644
index 0000000..9527737
--- /dev/null
+++ b/roles/setup/tasks/collect.yml
@@ -0,0 +1,334 @@
+---
+
+- name: Set cluster ID
+  set_fact:
+    ocp_cluster_id: "{{ current_cr_spec.clusterID }}"
+  when: current_cr_spec.clusterID
+
+- name: Check for OCP clusterID
+  debug: msg='OCP clusterID is not defined'
+  when: not ocp_cluster_id
+
+- name: Set validation boolean
+  set_fact:
+    ocp_validate_cert: "{{ current_cr_spec.validate_cert }}"
+  when: current_cr_spec.validate_cert
+
+- name: Check for validation boolean
+  debug: msg='HTTPS certificate validation variable is not defined; defaulting to true'
+  when: not current_cr_spec.validate_cert
+
+- name: Set service account token name
+  set_fact:
+    reporting_operator_token_name: "{{ current_cr_spec.reporting_operator_token_name }}"
+  when: current_cr_spec.reporting_operator_token_name
+
+- name: Check for service account token name
+  debug: msg='Reporting Operator service account token name is not defined'
+  when: not reporting_operator_token_name
+
+- name: Fail if the clusterID or service token are not defined
+  fail:
+    msg: 'The CostManagement custom resource requires the clusterID and reporting_operator_token_name to be defined.'
+  when: not ocp_cluster_id or not reporting_operator_token_name
+
+- name: Set upload_wait
+  set_fact:
+    collect_upload_wait: "{{ current_cr_spec.upload_wait | int }}"
+  when: current_cr_spec.upload_wait
+  ignore_errors: true
+
+- name: Format current_month string if less than 10
+  set_fact:
+    current_month: '{{ "0" + (current_month | string) }}'
+  when:
+    - (current_month | int)  < 10
+
+- name: Set monthly suffix for reports
+  set_fact:
+    current_year_month: '{{ (current_year | string )  + (current_month | string) }}'
+
+- name: Obtain metering api info
+  community.kubernetes.k8s_info:
+    api_version: v1
+    kind: Route
+    namespace: "{{ namespace }}"
+  register: metering_route
+
+- name: Set metering api route
+  set_fact:
+    metering_api_route: "{{ api_prefix }}{{ metering_route.resources[0].spec.host }}/api/v1/reports/get"
+  when: metering_route.resources
+
+- name: Get the service account token
+  community.kubernetes.k8s_info:
+    api_version: v1
+    kind: Secret
+    namespace: "{{ namespace }}"
+    name: "{{ reporting_operator_token_name }}"
+  register: reporting_token
+
+- name: Set authentication_secret name
+  set_fact:
+    authentication_secret_name: "{{ current_cr_spec.authentication_secret_name }}"
+  when: current_cr_spec.authentication_secret_name
+
+- name: debug auth secret name
+  debug:
+    var: authentication_secret_name
+  when: debug
+
+- name: Set the authentication method
+  set_fact:
+    authentication: "{{ current_cr_spec.authentication }}"
+  when: current_cr_spec.authentication
+  ignore_errors: true
+
+- name: debug auth method
+  debug:
+    var: authentication
+  when: debug
+
+- name: Set the ingress URL
+  set_fact:
+    ingress_url: "{{ current_cr_spec.ingress_url }}"
+  when: current_cr_spec.ingress_url
+  ignore_errors: true
+
+- name: debug ingress URL
+  debug:
+    var: ingress_url
+  when: debug
+
+- name: Fail if auth secret is not set
+  fail:
+    msg: 'The cost-mgmt-setup custom resource requires the authentication_secret_name to be defined.'
+  when: not authentication_secret_name
+
+- name: Get the authentication secret
+  community.kubernetes.k8s_info:
+    api_version: v1
+    kind: Secret
+    namespace: "{{ namespace }}"
+    name: "{{ authentication_secret_name }}"
+  register: authentication_secret
+
+- name: Decode the service account token
+  set_fact:
+    reporting_operator_token: "{{ reporting_token.resources[0].data.token | b64decode }}"
+  when: reporting_token.resources
+
+- name: Fail when reporting_operator_token not defined
+  fail:
+    msg: 'Reporting Operator token does not exist'
+  when: not reporting_operator_token
+
+- name: Fail if the authentication secret could not be found
+  fail:
+    msg: 'The authentication secret could not be found.'
+  when: not authentication_secret.resources
+
+- name: If authentication is set to token, get the auth token
+  set_fact:
+    authentication_token: "{{ authentication_secret.resources[0].data.token }}"
+  when: authentication_secret.resources and authentication == 'token'
+
+- name: If authentication is set to basic then grab username and password
+  set_fact:
+    username: "{{ authentication_secret.resources[0].data.username | b64decode }}"
+    password: "{{ authentication_secret.resources[0].data.password | b64decode }}"
+  when: authentication_secret.resources and authentication == 'basic'
+
+- name: Fail if no token but token is specified
+  fail:
+    msg: 'The authentication method was set to token but the authentication secret did not contain a token.'
+  when: authentication == 'token' and not authentication_token
+
+- name: Fail if no username but basic authentication is specified
+  fail:
+    msg: 'The authentication method was set to basic but the authentication secret did not contain a username.'
+  when: authentication == 'basic' and not username
+
+- name: Fail if no password but basic authentication is specified
+  fail:
+    msg: 'The authentication method was set to basic but the authentication secret did not contain a password.'
+  when: authentication == 'basic' and not password
+
+- name: Check if cert file exists
+  stat:
+    path: "{{ cacert_path }}"
+  register: trusted_cert
+
+- name: Fail if the trusted cert does not exist
+  fail:
+    msg: 'Failing because the ssl certificate does not exist.'
+  when: not trusted_cert
+
+- name: Obtain the source commit from file
+  set_fact:
+    source_commit: "{{ lookup('file', ansible_env.HOME + '/commit') }}"
+
+- name: debug the source_commit
+  debug:
+    var: source_commit
+  when: debug
+
+- name: Create trusted-ca-bundle if it doesn't exist
+  community.kubernetes.k8s:
+    namespace: "{{ namespace }}"
+    state: present
+    src: '{{ ansible_env.HOME }}/roles/collect/files/trusted_ca_certmap.yaml'
+
+- name: Get the trusted-ca-bundle
+  community.kubernetes.k8s_info:
+    api_version: v1
+    kind: ConfigMap
+    namespace: "{{ namespace }}"
+    name: "trusted-ca-bundle"
+  register: trusted_ca_bundle
+
+- name: Set the trusted-ca-bundle crt contents
+  set_fact:
+    trusted_ca_bundle_contents: "{{ trusted_ca_bundle.resources[0].data['ca-bundle.crt'] }}"
+  when: trusted_ca_bundle.resources
+
+- name: Write the trusted-ca-bundle contents to a file
+  copy: content="{{ trusted_ca_bundle_contents }}" dest="{{ cacert_path }}"
+  when: trusted_ca_bundle_contents is defined
+
+- name: Fail the trusted ca certificate could not be found and certificate validation is enabled
+  fail:
+    msg: 'The trusted ca certificate could not be found and certificate validation is enabled.'
+  when: trusted_ca_bundle_contents is not defined
+
+- name: Set download request facts
+  set_fact:
+    collect_file_prefix: '{{ collect_manifest_uuid }}'
+    format: "&format={{ collect_format }}"
+    namespace: "&namespace={{ namespace }}"
+
+# getting a little clever to build lists to append into
+- name: initialize fact lists
+  set_fact:
+    api_params: []
+    api_urls: []
+    csv_files: []
+
+# this appends the string inside the brackets to the 'api_params' list.
+- name: compile URL query params, append to param list
+  set_fact:
+    api_params: "{{ api_params + ['?name='+item+current_year_month+format+namespace] }}"
+  with_items: "{{ collect_reports }}"
+
+# this appends the string inside the brackets to the 'api_urls' list.
+- name: assemble compiled URL facts, append to list.
+  set_fact:
+    api_urls: "{{ api_urls + [metering_api_route+item] }}"
+  with_items: "{{ api_params }}"
+
+- name: Set download_path
+  set_fact:
+    collect_cluster_download_path: '{{ collect_download_path }}/{{  ocp_cluster_id }}'
+
+- name: Remove temp files
+  file:
+    path: '{{ collect_cluster_download_path }}'
+    state: absent
+  when: collect_delete_after | bool
+
+- name: Create temp dir for downloaded files
+  file:
+    path: '{{ collect_cluster_download_path }}'
+    state: directory
+    mode: 0777
+
+- name: Download OCP report from endpoint
+  get_url:
+    url: '{{ item }}'
+    headers:
+      Authorization: "Bearer {{ reporting_operator_token }}"
+    dest: '{{ collect_cluster_download_path }}/{{ collect_file_prefix }}_openshift_usage_report.{{ idx }}.{{ collect_format }}'
+    validate_certs: '{{ ocp_validate_cert | bool }}'
+    timeout: '{{ collect_ocp_report_timeout }}'
+  with_items: "{{ api_urls }}"
+  loop_control:
+    index_var: idx
+  register: download_result
+
+- name: debug download result
+  debug:
+    var: download_result
+  when: debug
+
+- name: append filename to fact list
+  set_fact:
+    csv_files: "{{ csv_files + [item.dest | basename] }}"
+  with_items: "{{ download_result.results }}"
+
+- name: debug csv_files
+  debug:
+    var: csv_files
+  when: debug
+
+- name: Check that required files exist
+  stat:
+    path: '{{ collect_cluster_download_path + "/" + item }}'
+  register: csv_stat_result
+  with_items:
+    - '{{ csv_files }}'
+
+- name: debug the csv_stat_result
+  debug:
+    var: csv_stat_result
+  when: debug
+
+- name: Check for empty download results
+  fail:
+    msg: 'Downloaded file {{ item }} has no content or could not be found: {{ item.stat }}.'
+  when: not item.stat.exists or (item.stat.exists and item.stat.size <= 0)
+  with_items:
+    - '{{ csv_stat_result.results }}'
+
+- name: Run packaging script to prepare reports for sending to Insights
+  script: package_report.py --filepath {{ ocp_cluster_id }} --max-size {{ collect_max_csvfile_size }} --ocp-cluster-id {{ ocp_cluster_id }} --overwrite
+  args:
+    chdir: '{{ collect_download_path }}'
+  register: packaged_reports
+
+- name: Wait time before upload in seconds
+  debug:
+    var: collect_upload_wait
+
+- name: Wait before upload to space out metric delivery
+  wait_for:
+    timeout: '{{ collect_upload_wait }}'
+  delegate_to: localhost
+
+- name: Upload the cost report to ingress using basic auth
+  shell:
+    cmd: 'curl -vvvv -F "file=@{{ item }};type=application/vnd.redhat.hccm.tar+tgz" {{ ingress_url }} -u {{ username }}:{{ password }} --cacert {{ cacert_path }}'
+    chdir: '{{ collect_download_path }}'
+  with_items:
+    - '{{ packaged_reports.stdout_lines }}'
+  when: authentication == 'basic'
+
+- name: Upload the cost report to ingress using token auth
+  shell:
+    cmd: 'curl -vvvv -F "file=@{{ item }};type=application/vnd.redhat.hccm.tar+tgz" {{ ingress_url }} -H "Authorization: Bearer {{ authentication_token }}" -H "User-Agent: cost-mgmt-operator/{{ source_commit }} cluster/{{ ocp_cluster_id }}" --cacert {{ cacert_path }}'
+    chdir: '{{ collect_download_path }}'
+  with_items:
+    - '{{ packaged_reports.stdout_lines }}'
+  when: authentication == 'token'
+
+- name: Remove upload files
+  file:
+    path: '{{ collect_download_path }}/{{ item }}'
+    state: absent
+  with_items:
+    - '{{ packaged_reports.stdout_lines }}'
+  when: collect_delete_after | bool
+
+- include_tasks: update-status.yml
+  vars:
+    status_vars:
+      upload_attempt_time: "{{ ansible_date_time.iso8601 }}"
diff --git a/roles/setup/tasks/main.yml b/roles/setup/tasks/main.yml
index 0ef399a..7325093 100644
--- a/roles/setup/tasks/main.yml
+++ b/roles/setup/tasks/main.yml
@@ -1,9 +1,97 @@
 ---
+
+- name: print hostvars[localhost]._cost_mgmt_openshift_io_costmanagement
+  debug:
+    var: hostvars["localhost"]._cost_mgmt_openshift_io_costmanagement
+  when:
+    - hostvars is defined
+    - hostvars["localhost"] is defined
+    - hostvars["localhost"]._cost_mgmt_openshift_io_costmanagement is defined
+  ignore_errors: true
+
+- name: print _cost_mgmt_openshift_io_costmanagement_spec
+  debug:
+    var: _cost_mgmt_openshift_io_costmanagement_spec
+  when:
+    - _cost_mgmt_openshift_io_costmanagement_spec is defined
+  ignore_errors: true
+
+- name: Set current cr facts
+  set_fact:
+    current_cr: "{{ hostvars['localhost']._cost_mgmt_openshift_io_costmanagement }}"
+    current_cr_spec: "{{ _cost_mgmt_openshift_io_costmanagement_spec }}"
+  when:
+    - hostvars is defined
+    - hostvars["localhost"] is defined
+    - hostvars["localhost"]._cost_mgmt_openshift_io_costmanagement is defined
+    - _cost_mgmt_openshift_io_costmanagement_spec is defined
+  ignore_errors: true
+
+- name: print meta
+  debug:
+    var: meta
+
+- name: Get upload_attempt_time
+  set_fact:
+    last_upload_attempt_time: "{{ current_cr.status.upload_attempt_time }}"
+    current_time: "{{ ansible_date_time.iso8601 }}"
+  when:
+    - current_cr is defined
+    - current_cr.status is defined
+    - current_cr.status.upload_attempt_time is defined
+  ignore_errors: true
+
+- name: print last_upload_attempt_time
+  debug:
+    var: last_upload_attempt_time
+  when: last_upload_attempt_time is defined
+  ignore_errors: true
+
+- name: iso8601 to timestamp
+  set_fact:
+    last_upload_attempt_timestamp: "{{ last_upload_attempt_time | regex_replace('[T, Z]+', ' ') | trim }}"
+    current_timestamp: "{{ current_time | regex_replace('[T, Z]+', ' ') | trim }}"
+  when: last_upload_attempt_time is defined
+  ignore_errors: true
+
+- name: print last_upload_attempt_timestamp
+  debug:
+    var: last_upload_attempt_timestamp
+  when: last_upload_attempt_timestamp is defined
+
+- name: timestamp to datetime
+  set_fact:
+    last_upload_attempt_datetime: "{{ last_upload_attempt_timestamp | to_datetime }}"
+    current_datetime: "{{ current_timestamp | to_datetime }}"
+    upload_time_diff: "{{ ((current_timestamp | to_datetime) - (last_upload_attempt_timestamp | to_datetime)).total_seconds() }}"
+  when: last_upload_attempt_timestamp is defined
+  ignore_errors: true
+
+- name: print last_upload_attempt_datetime
+  debug:
+    var: last_upload_attempt_datetime
+  when: last_upload_attempt_datetime is defined
+
+- name: print upload_time_diff
+  debug:
+    var: upload_time_diff
+  when: upload_time_diff is defined
+
+- name: Set collect_data
+  set_fact:
+    collect_data: true
+  when: upload_time_diff is not defined or (( upload_time_diff | int ) > upload_cycle_seconds)
+
+- name: print collect_data
+  debug:
+    var: collect_data
+  when: collect_data is defined
+
 - name: Search for metering resources
   community.kubernetes.k8s_info:
     api_version: v1
     kind: MeteringConfig
-    namespace: "{{ namespace }}"
+    namespace: "{{ meta.namespace }}"
   register: metering_objects
 
 - name: Ensure metering has been configured
@@ -11,35 +99,37 @@
     msg: 'Metering has not been configured.'
   when: not metering_objects.resources
 
-- name: Obtain cost-mgmt-setup info
-  community.kubernetes.k8s_info:
-    api_version: cost-mgmt.openshift.io/v1alpha1
-    kind: CostManagement
-    namespace: "{{ namespace }}"
-    name: cost-mgmt-setup
-  register: cost_mgmt_setup
+- name: Set upload_cycle_seconds
+  set_fact:
+    upload_cycle_seconds: "{{ current_cr_spec.upload_cycle_seconds | int }}"
+  when:
+    - current_cr_spec is defined
+    - current_cr_spec.upload_cycle_seconds is defined
 
 - name: Set current_day
   set_fact:
-    current_day: "{{ cost_mgmt_setup.resources[0].spec.current_day | int }}"
-  when: cost_mgmt_setup.resources
-  ignore_errors: true
+    current_day: "{{ current_cr_spec.current_day | int }}"
+  when:
+    - current_cr_spec is defined
+    - current_cr_spec.current_day is defined
 
 - name: Set current_month
   set_fact:
-    current_month: "{{ cost_mgmt_setup.resources[0].spec.current_month | int }}"
-  when: cost_mgmt_setup.resources
-  ignore_errors: true
+    current_month: "{{ current_cr_spec.current_month | int }}"
+  when:
+    - current_cr_spec is defined
+    - current_cr_spec.current_month is defined
 
 - name: Set current_year
   set_fact:
-    current_year: "{{ cost_mgmt_setup.resources[0].spec.current_year | int }}"
-  when: cost_mgmt_setup.resources
-  ignore_errors: true
+    current_year: "{{ current_cr_spec.current_year | int }}"
+  when:
+    - current_cr_spec is defined
+    - current_cr_spec.current_year is defined
 
 - name: Create datasources and reportqueries
   community.kubernetes.k8s:
-    namespace: "{{ namespace }}"
+    namespace: "{{ meta.namespace }}"
     state: present
     src: "{{ item }}"
   with_fileglob:
@@ -51,6 +141,7 @@
     input_month: '{{ current_month }}'
     input_year: '{{ current_year }}'
     month_delta: 0
+    namespace: "{{ meta.namespace }}"
     state: 'present'
 
 - name: Manage next month reports
@@ -59,6 +150,7 @@
     input_month: '{{ current_month }}'
     input_year: '{{ current_year }}'
     month_delta: 1
+    namespace: "{{ meta.namespace }}"
     state: 'present'
   when: (current_day | int)  > 24
 
@@ -68,5 +160,17 @@
     input_month: '{{ current_month }}'
     input_year: '{{ current_year }}'
     month_delta: -1
+    namespace: "{{ meta.namespace }}"
     state: 'absent'
   when: (current_day | int)  < 3
+
+- name: Upload metric data
+  import_tasks: collect.yml
+  vars:
+    current_month: '{{ current_month }}'
+    current_year: '{{ current_year }}'
+    namespace: "{{ meta.namespace }}"
+    current_cr: "{{ current_cr }}"
+    current_cr_spec: "{{ current_cr_spec }}"
+  when:
+    - collect_data | bool
diff --git a/roles/setup/tasks/update-status.yml b/roles/setup/tasks/update-status.yml
new file mode 100644
index 0000000..7c47b71
--- /dev/null
+++ b/roles/setup/tasks/update-status.yml
@@ -0,0 +1,9 @@
+---
+- name: Update CostManagement CR status
+  operator_sdk.util.k8s_status:
+    api_version: "{{ current_cr.apiVersion }}"
+    kind: "{{ current_cr.kind }}"
+    name: "{{ current_cr.metadata.name }}"
+    namespace: "{{ current_cr.metadata.namespace }}"
+    status: "{{ status_vars }}"
+  ignore_errors: yes
diff --git a/watches.yaml b/watches.yaml
index 3aa86d3..79b1a3a 100644
--- a/watches.yaml
+++ b/watches.yaml
@@ -4,10 +4,4 @@
   group: cost-mgmt.openshift.io
   kind: CostManagement
   role: /opt/ansible/roles/setup
-
-# collect the reports
-- version: v1alpha1
-  group: cost-mgmt-data.openshift.io
-  kind: CostManagementData
-  role: /opt/ansible/roles/collect
-  reconcilePeriod: 360m
+  reconcilePeriod: 5m

From a6a70644430aa5be765ed825d3f801c274c68822 Mon Sep 17 00:00:00 2001
From: Chris Hambridge <chambrid@redhat.com>
Date: Thu, 10 Sep 2020 12:11:52 -0400
Subject: [PATCH 2/2] Remove collect role and fix test-local tests.

---
 molecule/default/playbook.yml               |   1 -
 molecule/test-cluster/converge.yml          |  32 --
 molecule/test-local/converge.yml            | 159 +++------
 roles/collect/defaults/main.yml             |  27 --
 roles/collect/files/package_report.py       | 300 ----------------
 roles/collect/files/trusted_ca_certmap.yaml |  11 -
 roles/collect/tasks/main.yml                | 367 --------------------
 roles/setup/tasks/collect.yml               |   8 +-
 roles/setup/tasks/main.yml                  |   6 +-
 9 files changed, 52 insertions(+), 859 deletions(-)
 delete mode 100644 roles/collect/defaults/main.yml
 delete mode 100755 roles/collect/files/package_report.py
 delete mode 100644 roles/collect/files/trusted_ca_certmap.yaml
 delete mode 100644 roles/collect/tasks/main.yml

diff --git a/molecule/default/playbook.yml b/molecule/default/playbook.yml
index bacbabf..1b6fe5b 100644
--- a/molecule/default/playbook.yml
+++ b/molecule/default/playbook.yml
@@ -6,6 +6,5 @@
     ansible_python_interpreter: '{{ ansible_playbook_python }}'
   roles:
     - setup
-    - collect
 
 - import_playbook: '{{ playbook_dir }}/asserts.yml'
diff --git a/molecule/test-cluster/converge.yml b/molecule/test-cluster/converge.yml
index 15fb355..9faae17 100644
--- a/molecule/test-cluster/converge.yml
+++ b/molecule/test-cluster/converge.yml
@@ -41,36 +41,4 @@
       delay: 12
       retries: 10
 
-    - name: Create the cost-mgmt-data.openshift.io/v1alpha1.CostManagementData
-      k8s:
-        state: present
-        namespace: '{{ namespace }}'
-        definition: '{{ cm_data_cr }}'
-
-    - name: debug cost-mgmt data custom resource
-      ignore_errors: yes
-      failed_when: false
-      debug:
-        var: debug_cr
-      vars:
-        debug_cr: '{{ lookup("k8s",
-          kind=cm_data_cr.kind,
-          api_version=cm_data_cr.apiVersion,
-          namespace=namespace,
-          resource_name=cm_data_cr.metadata.name
-        )}}'
-
-    - name: Wait 2m for reconciliation to complete for collect
-      k8s_info:
-        api_version: '{{ cm_data_cr.apiVersion }}'
-        kind: '{{ cm_data_cr.kind }}'
-        namespace: '{{ namespace }}'
-        name: '{{ cm_data_cr.metadata.name }}'
-      register: cm_data_cr
-      until:
-        - "'Successful' in (cm_data_cr |
-          json_query('resources[].status.conditions[].reason'))"
-      delay: 12
-      retries: 10
-
 - import_playbook: '{{ playbook_dir }}/../default/asserts.yml'
diff --git a/molecule/test-local/converge.yml b/molecule/test-local/converge.yml
index f5209f3..9fb43ad 100644
--- a/molecule/test-local/converge.yml
+++ b/molecule/test-local/converge.yml
@@ -43,7 +43,7 @@
         src: 'mock_ingress_server.py'
         mode: 0666
 
-- name: Converge on setup
+- name: Prepare converge on setup
   hosts: localhost
   connection: local
   vars:
@@ -56,10 +56,6 @@
       'crds/cost_mgmt_cr.yaml'])) | from_yaml }}"
     cm_crd: "{{ lookup('file', '/'.join([deploy_dir,
       'crds/cost_mgmt_crd.yaml'])) | from_yaml }}"
-    cm_data_crd: "{{ lookup('file', '/'.join([deploy_dir,
-      'crds/cost_mgmt_data_crd.yaml'])) | from_yaml }}"
-    cm_data_cr: "{{ lookup('file', '/'.join([molecule_dir,
-      'crds/cost_mgmt_data_cr.yaml'])) | from_yaml }}"
     metering_cr: "{{ lookup('file', '/'.join([molecule_dir,
       'crds/metering_cr.yaml'])) | from_yaml }}"
     metering_crd: "{{ lookup('file', '/'.join([molecule_dir,
@@ -78,6 +74,9 @@
       'crds/reporting_operator_token.yaml'])) | from_yaml }}"
     trusted_ca_config: "{{ lookup('file', '/'.join([molecule_dir,
       'crds/trusted_ca_certmap.yaml'])) | from_yaml }}"
+    authentication_secret: "{{ lookup('file', '/'.join([molecule_dir,
+      'crds/authentication_secret.yaml'])) | from_yaml }}"
+
   tasks:
     - block:
         - name: Delete the Operator Deployment
@@ -131,12 +130,6 @@
             namespace: '{{ namespace }}'
             definition: '{{ cm_crd }}'
 
-        - name: Create the cost-mgmt-data.openshift.io/v1alpha1.CostManagementData crd
-          k8s:
-            state: present
-            namespace: '{{ namespace }}'
-            definition: '{{ cm_data_crd }}'
-
         - name: Create the trusted cert ConfigMap
           k8s:
             state: present
@@ -168,8 +161,8 @@
               (cr | json_query('resources[].status.conditions[].reason'))"
             - "'Metering has not been configured.' in
               (cr | json_query('resources[].status.conditions[].message'))"
-          delay: 12
-          retries: 15
+          delay: 10
+          retries: 12
 
       rescue:
         - name: debug cr
@@ -254,6 +247,12 @@
           command: docker ps -aqf "ancestor=cost-mgmt.openshift.io/cost-mgmt-operator:testing"
           register: container_id
 
+        - name: Create the authentication Secret
+          k8s:
+            state: present
+            namespace: '{{ namespace }}'
+            definition: '{{ authentication_secret }}'
+
         - name: Create the metering namespace
           k8s:
             api_version: v1
@@ -321,62 +320,7 @@
           debug:
             var: reporting_token
 
-        - name: Create the cost-mgmt.openshift.io/v1alpha1.CostManagement crd
-          k8s:
-            state: present
-            namespace: '{{ namespace }}'
-            definition: '{{ cm_crd }}'
-
-        - name: Create the cost-mgmt-data.openshift.io/v1alpha1.CostManagementData crd
-          k8s:
-            state: present
-            namespace: '{{ namespace }}'
-            definition: '{{ cm_data_crd }}'
-
-        - name: Create the cost-mgmt.openshift.io/v1alpha1.CostManagement
-          k8s:
-            state: present
-            namespace: '{{ namespace }}'
-            definition: '{{ cm_cr }}'
-
-        - name: Wait 2m for reconciliation to complete for setup
-          k8s_info:
-            api_version: '{{ cm_cr.apiVersion }}'
-            kind: '{{ cm_cr.kind }}'
-            namespace: '{{ namespace }}'
-            name: '{{ cm_cr.metadata.name }}'
-          register: cr
-          until:
-            - "'Successful' in (cr |
-              json_query('resources[].status.conditions[].reason'))"
-          delay: 12
-          retries: 10
-
       rescue:
-        - name: debug cr
-          ignore_errors: yes
-          failed_when: false
-          debug:
-            var: debug_cr
-          vars:
-            debug_cr: '{{ lookup("k8s",
-              kind=cm_data_cr.kind,
-              api_version=cm_data_cr.apiVersion,
-              namespace=namespace,
-              resource_name=cm_data_cr.metadata.name
-            )}}'
-
-        - name: debug cost-mgmt lookup
-          ignore_errors: yes
-          failed_when: false
-          debug:
-            var: deploy
-          vars:
-            deploy: '{{ lookup("k8s",
-              kind="Deployment",
-              api_version="apps/v1",
-              namespace=namespace
-            )}}'
 
         - name: get operator logs
           ignore_errors: yes
@@ -452,57 +396,44 @@
     - name: start server
       command: docker exec -i "{{ containerName }}" bash -c 'cd /tmp/www; nohup python3 mock_ingress_server.py </dev/null >/dev/null 2>&1 &'
 
-- name: Converge for collect
+- name: Converge for setup
   hosts: localhost
   connection: local
   vars:
     ansible_python_interpreter: '{{ ansible_playbook_python }}'
     deploy_dir: "{{ lookup('env', 'MOLECULE_PROJECT_DIRECTORY') }}/deploy"
     molecule_dir: "{{ lookup('env', 'MOLECULE_PROJECT_DIRECTORY') }}/molecule"
-    cm_data_cr: "{{ lookup('file', '/'.join([deploy_dir,
-      'crds/cost_mgmt_data_cr.yaml'])) | from_yaml }}"
-    authentication_secret: "{{ lookup('file', '/'.join([molecule_dir,
-      'crds/authentication_secret.yaml'])) | from_yaml }}"
     REPLACE_IMAGE: cost-mgmt.openshift.io/cost-mgmt-operator:testing
+    cm_cr: "{{ lookup('file', '/'.join([molecule_dir,
+      'crds/cost_mgmt_cr.yaml'])) | from_yaml }}"
+    cm_crd: "{{ lookup('file', '/'.join([deploy_dir,
+      'crds/cost_mgmt_crd.yaml'])) | from_yaml }}"
   tasks:
     - block:
-        - name: Create the authentication Secret
-          k8s:
-            state: present
-            namespace: '{{ namespace }}'
-            definition: '{{ authentication_secret }}'
-
-        - name: Create the cost-mgmt-data.openshift.io/v1alpha1.CostManagementData
-          k8s:
-            state: present
-            namespace: '{{ namespace }}'
-            definition: '{{ cm_data_cr }}'
-
-        - name: debug cost-mgmt data custom resource
-          ignore_errors: yes
-          failed_when: false
-          debug:
-            var: debug_cr
-          vars:
-            debug_cr: '{{ lookup("k8s",
-              kind=cm_data_cr.kind,
-              api_version=cm_data_cr.apiVersion,
-              namespace=namespace,
-              resource_name=cm_data_cr.metadata.name
-            )}}'
-
-        - name: Wait 2m for reconciliation to complete for collect
-          k8s_info:
-            api_version: '{{ cm_data_cr.apiVersion }}'
-            kind: '{{ cm_data_cr.kind }}'
-            namespace: '{{ namespace }}'
-            name: '{{ cm_data_cr.metadata.name }}'
-          register: cm_data_cr
-          until:
-            - "'Successful' in (cm_data_cr |
-              json_query('resources[].status.conditions[].reason'))"
-          delay: 12
-          retries: 10
+      - name: Create the cost-mgmt.openshift.io/v1alpha1.CostManagement crd
+        k8s:
+          state: present
+          namespace: '{{ namespace }}'
+          definition: '{{ cm_crd }}'
+
+      - name: Create the cost-mgmt.openshift.io/v1alpha1.CostManagement
+        k8s:
+          state: present
+          namespace: '{{ namespace }}'
+          definition: '{{ cm_cr }}'
+
+      - name: Wait 5m for reconciliation to complete for setup
+        k8s_info:
+          api_version: '{{ cm_cr.apiVersion }}'
+          kind: '{{ cm_cr.kind }}'
+          namespace: '{{ namespace }}'
+          name: '{{ cm_cr.metadata.name }}'
+        register: cr
+        until:
+          - "'Successful' in (cr |
+            json_query('resources[].status.conditions[].reason'))"
+        delay: 20
+        retries: 15
 
       rescue:
         - name: debug cr
@@ -512,10 +443,10 @@
             var: debug_cr
           vars:
             debug_cr: '{{ lookup("k8s",
-              kind=cm_data_cr.kind,
-              api_version=cm_data_cr.apiVersion,
+              kind=cm_cr.kind,
+              api_version=cm_cr.apiVersion,
               namespace=namespace,
-              resource_name=cm_data_cr.metadata.name
+              resource_name=cm_cr.metadata.name
             )}}'
 
         - name: debug cost-mgmt lookup
@@ -534,7 +465,7 @@
           ignore_errors: yes
           failed_when: false
           command: kubectl logs
-            deployment/{{ definition.metadata.name }} -n {{ namespace }} -c  operator
+            deployment/{{ definition.metadata.name }} -n {{ namespace }}
           environment:
             KUBECONFIG: '{{ lookup("env", "KUBECONFIG") }}'
           vars:
diff --git a/roles/collect/defaults/main.yml b/roles/collect/defaults/main.yml
deleted file mode 100644
index bea4c35..0000000
--- a/roles/collect/defaults/main.yml
+++ /dev/null
@@ -1,27 +0,0 @@
----
-namespace: openshift-metering
-collect_format: 'csv'
-collect_manifest_uuid: '{{ 99999999999999999999 | random | to_uuid }}'
-collect_archive_name: cost-mgmt
-ocp_validate_cert: 'true'
-ocp_cluster_id: ''
-reporting_operator_token_name: ''
-collect_reports:
-  - 'cm-openshift-usage-lookback-'
-  - 'cm-openshift-persistentvolumeclaim-lookback-'
-  - 'cm-openshift-node-labels-lookback-'
-collect_download_path: '/tmp/cost-mgmt-operator-collect'
-collect_delete_after: 'true'
-collect_ocp_report_timeout: 60
-collect_max_csvfile_size: 99
-api_prefix: 'https://'
-ingress_url: 'https://cloud.redhat.com/api/ingress/v1/upload'
-authentication: 'token'
-authentication_token: ''
-username: ''
-password: ''
-cacert_path: '{{ collect_download_path }}/ca-bundle.crt'
-debug: 'true'
-collect_upload_wait: '{{ 2100 | random(step=10) }}'
-current_month: '{{ ansible_date_time.month | int }}'
-current_year: '{{ ansible_date_time.year | int }}'
diff --git a/roles/collect/files/package_report.py b/roles/collect/files/package_report.py
deleted file mode 100755
index 6ec0d88..0000000
--- a/roles/collect/files/package_report.py
+++ /dev/null
@@ -1,300 +0,0 @@
-#!/usr/bin/env python3
-# -*- coding: utf-8 -*-
-#
-# Copyright 2020 Red Hat, Inc.
-#
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Affero General Public License as
-# published by the Free Software Foundation, either version 3 of the
-# License, or (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU Affero General Public License for more details.
-#
-# You should have received a copy of the GNU Affero General Public License
-# along with this program.  If not, see <https://www.gnu.org/licenses/>.
-#
-"""Create a tarball for metering reports downloaded from an OpenShift cluster."""
-
-import argparse
-import csv
-import logging
-import json
-import os
-import sys
-import tarfile
-from datetime import datetime
-from uuid import uuid4
-
-DEFAULT_MAX_SIZE = 100
-MEGABYTE = 1024 * 1024
-
-TEMPLATE = {
-    "files": None,
-    "date": datetime.utcnow().isoformat(),
-    "uuid": None,
-    "cluster_id": None
-}
-
-
-# the csv module doesn't expose the bytes-offset of the
-# underlying file object.
-#
-# instead, the script estimates the size of the data as VARIANCE percent larger than a
-# naïve string concatenation of the CSV fields to cover the overhead of quoting
-# and delimiters. This gets close enough for now.
-VARIANCE = 0.03
-
-# Flag to use when writing to a file. Changed to "w" by the -o flag.
-FILE_FLAG = "x"
-
-# if we're creating more than 1k files, something is probably wrong.
-MAX_SPLITS = 1000
-
-# logging
-LOG = logging.getLogger(__name__)
-LOG_FORMAT = "%(asctime)s [%(levelname)s] %(message)s"
-LOG_VERBOSITY = [logging.ERROR, logging.WARNING, logging.INFO, logging.DEBUG]
-logging.basicConfig(format=LOG_FORMAT, level=logging.ERROR, stream=sys.stdout)
-
-
-def parse_args():
-    """Handle CLI arg parsing."""
-    parser = argparse.ArgumentParser(
-        description="Cost Management CSV file packaging script", prog=sys.argv[0])
-
-    # required args
-    parser.add_argument("-f", "--filepath", required=True,
-                        help="path to files to package")
-    parser.add_argument(
-        "-s",
-        "--max-size",
-        type=int,
-        default=DEFAULT_MAX_SIZE,
-        help=f"Maximum size of packages in MiB. (Default: {DEFAULT_MAX_SIZE} MiB)",
-    )
-    parser.add_argument(
-        "-o", "--overwrite", action="store_true", default=False, help="whether to overwrite existing files."
-    )
-    parser.add_argument("--ocp-cluster-id", required=True,
-                        help="OCP Cluster ID")
-    parser.add_argument("-v", "--verbosity", action="count",
-                        default=0, help="increase verbosity (up to -vvv)")
-    return parser.parse_args()
-
-
-def write_part(filename, csvreader, header, num=0, size=(DEFAULT_MAX_SIZE * MEGABYTE)):
-    """Split a part of the file into a new file.
-
-    Args:
-        filename (str) name of original file
-        csvreader (CSVReader) the csvreader object of original file
-        header (list) the CSV file's header list
-        num (int) the current split file index
-        size (int) the maximum size of the split file in bytes
-
-    Returns:
-        (str) the name of the new split file
-        (bool) whether the split reached the end of the csvreader
-
-    """
-    fname_part, ext = os.path.splitext(filename)
-    size_estimate = 0
-    split_filename = f"{fname_part}_{num}{ext}"
-    try:
-        with open(split_filename, FILE_FLAG) as split_part:
-            LOG.info(f"Writing new file: {split_filename}")
-            csvwriter = csv.writer(split_part)
-            csvwriter.writerow(header)
-            for row in csvreader:
-                csvwriter.writerow(row)
-
-                row_len = len(",".join(row))
-                size_estimate += row_len + (row_len * VARIANCE)
-
-                LOG.debug(f"file size (est): {size_estimate}")
-                if size_estimate >= size:
-                    return (split_filename, False)
-    except (IOError, FileExistsError) as exc:
-        LOG.critical(f"Fatal error: {exc}")
-        sys.exit(2)
-    return (split_filename, True)
-
-
-def need_split(filepath, max_size):
-    """Determine whether to split up the CSV files.
-
-    Args:
-        filepath (str) a directory
-        max_size (int) maximum split size in MiB
-
-    Returns:
-        True if any single file OR the total sum of files exceeds the MAX_SIZE
-        False if each single file AND the total file size is below MAX_SIZE
-
-    """
-    total_size = 0
-    max_bytes = max_size * MEGABYTE
-    for filename in os.listdir(filepath):
-        this_size = os.stat(f"{filepath}/{filename}").st_size
-        total_size += this_size
-        if this_size >= max_bytes or total_size >= max_bytes:
-            return True
-    return False
-
-
-def split_files(filepath, max_size):
-    """Split any files that exceed the file size threshold.
-
-    Args:
-        filepath (str) file path containing the CSV files
-        max_size (int) the maximum size in MiB for each file
-
-    """
-    for filename in os.listdir(filepath):
-        abspath = f"{filepath}/{filename}"
-        if os.stat(abspath).st_size >= max_size * MEGABYTE:
-            csvheader = None
-            split_files = []
-            with open(abspath, "r") as fhandle:
-                csvreader = csv.reader(fhandle)
-                csvheader = next(csvreader)
-                LOG.debug(f"Header: {csvheader}")
-
-                part = 1
-                while True:
-                    newfile, eof = write_part(
-                        abspath, csvreader, csvheader, num=part, size=(max_size * MEGABYTE))
-                    split_files.append(newfile)
-                    part += 1
-                    if eof or part >= MAX_SPLITS:
-                        break
-
-            os.remove(abspath)
-
-            # return the list of split files to stdout
-            LOG.info(f"Split files: {split_files}")
-
-
-def render_manifest(args, archivefiles=[]):
-    """Render the manifest template and write it to a file.
-
-    Args:
-        args (Namespace) an ArgumentParser Namespace object
-
-    Returns:
-        (str) the rendered manifest file name
-        (str) the manifest uuid
-    """
-    manifest = TEMPLATE
-    manifest_uuid = str(uuid4())
-    manifest["cluster_id"] = args.ocp_cluster_id
-    manifest["uuid"] = manifest_uuid
-    manifest_files = []
-    for idx in range(len(archivefiles)): 
-        upload_name = f"{manifest_uuid}_openshift_usage_report.{idx}.csv"
-        manifest_files.append(upload_name)
-    manifest["files"] = manifest_files
-    LOG.debug(f"rendered manifest: {manifest}")
-    manifest_filename = f"{args.filepath}/manifest.json"
-
-    if not os.path.exists(args.filepath):
-        os.makedirs(args.filepath)
-        LOG.info(f"Created dirs: {args.filepath}")
-
-    try:
-        with open(manifest_filename, FILE_FLAG) as mfile:
-            json.dump(manifest, mfile)
-    except FileExistsError as exc:
-        LOG.critical(f"Fatal error: {exc}")
-        sys.exit(2)
-    LOG.info(f"manifest generated")
-    return (manifest_filename, manifest_uuid)
-
-
-def write_tarball(args, tarfilename, manifest_filename, manifest_uuid, archivefiles, file_count=0):
-    """Write a tarball, adding the given files to the archive.
-
-    Args:
-        args (Namespace) an ArgumentParser Namespace object
-        tarfilename (str) the name of the tarball to create
-        manifest_filename (str) the name of the report manifest
-        manifest_uuid (str) the unique identifier of the manifest
-        archivefiles (list) the list of files to include in the archive
-        file_count (int) file number initializer
-
-    Returns:
-        (str) full filepath of the created tarball
-
-    Raises:
-        FileExistsError if tarfilename already exists
-    """
-    if not archivefiles:
-        return None
-    
-    try:
-        with tarfile.open(tarfilename, f"{FILE_FLAG}:gz") as tarball:
-            for fname in archivefiles:
-                LOG.debug(f"Adding {fname} to {tarfilename}: ")
-                if fname.endswith(".csv"):
-                    upload_name = f"{manifest_uuid}_openshift_usage_report.{file_count}.csv"
-                    tarball.add(fname, arcname=upload_name)
-                    file_count += 1
-            tarball.add(manifest_filename, arcname="manifest.json")
-    except FileExistsError as exc:
-        LOG.critical(exc)
-        sys.exit(2)
-    LOG.info(f"Wrote: {tarfilename}")
-    return f"{tarfilename}", file_count
-
-
-def build_local_csv_file_list(staging_directory):
-    """Build a list of all report csv files in staging directory."""
-    file_list = []
-    for csv_file in os.listdir(staging_directory):
-        if ".csv" in csv_file:
-            file_list.append(f"{staging_directory}/{csv_file}")
-    return file_list
-
-if "__main__" in __name__:
-    args = parse_args()
-    if args.verbosity:
-        LOG.setLevel(LOG_VERBOSITY[args.verbosity])
-    LOG.debug("CLI Args: %s", args)
-
-    if args.overwrite:
-        FILE_FLAG = "w"
-
-    out_files = []
-    need_split = need_split(args.filepath, args.max_size)
-    if need_split:
-        split_files(args.filepath, args.max_size)
-        tarpath = args.filepath + "/../"
-        tarfiletmpl = "cost-mgmt{}.tar.gz"
-
-        file_list = build_local_csv_file_list(args.filepath)
-        manifest_filename, manifest_uuid = render_manifest(args, file_list)
-        file_count = 0
-        for idx, filename in enumerate(file_list):
-            if ".csv" in filename:
-                tarfilename = os.path.abspath(
-                    tarpath + tarfiletmpl.format(idx))
-                output_tar, file_count = write_tarball(args, 
-                    tarfilename, manifest_filename, manifest_uuid, [filename], file_count)
-                if output_tar:
-                    out_files.append(output_tar)
-
-    else:
-        tarfilename = os.path.abspath(args.filepath + "/../cost-mgmt.tar.gz")
-
-        file_list = build_local_csv_file_list(args.filepath)
-        if file_list:
-            manifest_filename, manifest_uuid = render_manifest(args, file_list)
-            output_tar, _ = write_tarball(args, tarfilename, manifest_filename, manifest_uuid, file_list)
-            if output_tar:
-                out_files.append(output_tar)
-
-    for fname in out_files:
-        print(fname)
diff --git a/roles/collect/files/trusted_ca_certmap.yaml b/roles/collect/files/trusted_ca_certmap.yaml
deleted file mode 100644
index 233b3b0..0000000
--- a/roles/collect/files/trusted_ca_certmap.yaml
+++ /dev/null
@@ -1,11 +0,0 @@
----
-
-apiVersion: v1
-kind: ConfigMap
-metadata:
-  namespace: openshift-metering
-  name: trusted-ca-bundle
-  annotations:
-    release.openshift.io/create-only: "true"
-  labels:
-    config.openshift.io/inject-trusted-cabundle: "true"
diff --git a/roles/collect/tasks/main.yml b/roles/collect/tasks/main.yml
deleted file mode 100644
index 7450e4f..0000000
--- a/roles/collect/tasks/main.yml
+++ /dev/null
@@ -1,367 +0,0 @@
----
-
-- name: print _cost_mgmt_data_openshift_io_costmanagementdata
-  debug:
-    var: _cost_mgmt_data_openshift_io_costmanagementdata
-
-- name: print meta
-  debug:
-    var: meta
-
-- name: Obtain cost-mgmt-setup info
-  community.kubernetes.k8s_info:
-    api_version: cost-mgmt.openshift.io/v1alpha1
-    kind: CostManagement
-    namespace: "{{ namespace }}"
-    name: cost-mgmt-setup
-  register: cost_mgmt_setup
-
-- name: Fail if cost-mgmt-setup does not exist
-  fail:
-    msg: 'The cost-mgmt-setup custom resource has not been configured.'
-  when: not cost_mgmt_setup.resources
-
-- name: Set cluster ID
-  set_fact:
-    ocp_cluster_id: "{{ cost_mgmt_setup.resources[0].spec.clusterID }}"
-  when: cost_mgmt_setup.resources
-
-- name: Check for OCP clusterID
-  debug: msg='OCP clusterID is not defined'
-  when: not ocp_cluster_id
-
-- name: Set validation boolean
-  set_fact:
-    ocp_validate_cert: "{{ cost_mgmt_setup.resources[0].spec.validate_cert }}"
-  when: cost_mgmt_setup.resources
-
-- name: Check for validation boolean
-  debug: msg='HTTPS certificate validation variable is not defined; defaulting to true'
-  when: not cost_mgmt_setup.resources
-
-- name: Set service account token name
-  set_fact:
-    reporting_operator_token_name: "{{ cost_mgmt_setup.resources[0].spec.reporting_operator_token_name }}"
-  when: cost_mgmt_setup.resources
-
-- name: Check for service account token name
-  debug: msg='Reporting Operator service account token name is not defined'
-  when: not reporting_operator_token_name
-
-- name: Fail if the clusterID or service token are not defined
-  fail:
-    msg: 'The cost-mgmt-setup custom resource requires the clusterID and reporting_operator_token_name to be defined.'
-  when: not ocp_cluster_id or not reporting_operator_token_name
-
-- name: Set upload_wait
-  set_fact:
-    collect_upload_wait: "{{ cost_mgmt_setup.resources[0].spec.upload_wait | int }}"
-  when: cost_mgmt_setup.resources
-  ignore_errors: true
-
-- name: Set current_month
-  set_fact:
-    current_month: "{{ cost_mgmt_setup.resources[0].spec.current_month | string }}"
-  when: cost_mgmt_setup.resources
-  ignore_errors: true
-
-- name: Format current_month string if less than 10
-  set_fact:
-    current_month: '{{ "0" + (current_month | string) }}'
-  when:
-    - (current_month | int)  < 10
-
-- name: Set current_year
-  set_fact:
-    current_year: "{{ cost_mgmt_setup.resources[0].spec.current_year | string }}"
-  when: cost_mgmt_setup.resources
-  ignore_errors: true
-
-- name: Set monthly suffix for reports
-  set_fact:
-    current_year_month: '{{ (current_year | string )  + (current_month | string) }}'
-
-- name: Obtain metering api info
-  community.kubernetes.k8s_info:
-    api_version: v1
-    kind: Route
-    namespace: "{{ namespace }}"
-  register: metering_route
-
-- name: Set metering api route
-  set_fact:
-    metering_api_route: "{{ api_prefix }}{{ metering_route.resources[0].spec.host }}/api/v1/reports/get"
-  when: metering_route.resources
-
-- name: Get the service account token
-  community.kubernetes.k8s_info:
-    api_version: v1
-    kind: Secret
-    namespace: "{{ namespace }}"
-    name: "{{ reporting_operator_token_name }}"
-  register: reporting_token
-
-- name: Set authentication_secret name
-  set_fact:
-    authentication_secret_name: "{{ cost_mgmt_setup.resources[0].spec.authentication_secret_name }}"
-  when: cost_mgmt_setup.resources
-
-- name: debug auth secret name
-  debug:
-    var: authentication_secret_name
-  when: debug
-
-- name: Set the authentication method
-  set_fact:
-    authentication: "{{ cost_mgmt_setup.resources[0].spec.authentication }}"
-  when: cost_mgmt_setup.resources
-  ignore_errors: true
-
-- name: debug auth method
-  debug:
-    var: authentication
-  when: debug
-
-- name: Set the ingress URL
-  set_fact:
-    ingress_url: "{{ cost_mgmt_setup.resources[0].spec.ingress_url }}"
-  when: cost_mgmt_setup.resources
-  ignore_errors: true
-
-- name: debug ingress URL
-  debug:
-    var: ingress_url
-  when: debug
-
-- name: Fail if auth secret is not set
-  fail:
-    msg: 'The cost-mgmt-setup custom resource requires the authentication_secret_name to be defined.'
-  when: not authentication_secret_name
-
-- name: Get the authentication secret
-  community.kubernetes.k8s_info:
-    api_version: v1
-    kind: Secret
-    namespace: "{{ namespace }}"
-    name: "{{ authentication_secret_name }}"
-  register: authentication_secret
-
-- name: Decode the service account token
-  set_fact:
-    reporting_operator_token: "{{ reporting_token.resources[0].data.token | b64decode }}"
-  when: reporting_token.resources
-
-- name: Fail when reporting_operator_token not defined
-  fail:
-    msg: 'Reporting Operator token does not exist'
-  when: not reporting_operator_token
-
-- name: Fail if the authentication secret could not be found
-  fail:
-    msg: 'The authentication secret could not be found.'
-  when: not authentication_secret.resources
-
-- name: If authentication is set to token, get the auth token
-  set_fact:
-    authentication_token: "{{ authentication_secret.resources[0].data.token }}"
-  when: authentication_secret.resources and authentication == 'token'
-
-- name: If authentication is set to basic then grab username and password
-  set_fact:
-    username: "{{ authentication_secret.resources[0].data.username | b64decode }}"
-    password: "{{ authentication_secret.resources[0].data.password | b64decode }}"
-  when: authentication_secret.resources and authentication == 'basic'
-
-- name: Fail if no token but token is specified
-  fail:
-    msg: 'The authentication method was set to token but the authentication secret did not contain a token.'
-  when: authentication == 'token' and not authentication_token
-
-- name: Fail if no username but basic authentication is specified
-  fail:
-    msg: 'The authentication method was set to basic but the authentication secret did not contain a username.'
-  when: authentication == 'basic' and not username
-
-- name: Fail if no password but basic authentication is specified
-  fail:
-    msg: 'The authentication method was set to basic but the authentication secret did not contain a password.'
-  when: authentication == 'basic' and not password
-
-- name: Check if cert file exists
-  stat:
-    path: "{{ cacert_path }}"
-  register: trusted_cert
-
-- name: debug the trusted cert
-  debug:
-    var: trusted_cert
-  when: debug
-
-- name: Fail if the trusted cert does not exist
-  fail:
-    msg: 'Failing because the ssl certificate does not exist.'
-  when: not trusted_cert
-
-- name: Obtain the source commit from file
-  set_fact:
-    source_commit: "{{ lookup('file', ansible_env.HOME + '/commit') }}"
-
-- name: debug the source_commit
-  debug:
-    var: source_commit
-  when: debug
-
-- name: Create trusted-ca-bundle if it doesn't exist
-  community.kubernetes.k8s:
-    namespace: "{{ namespace }}"
-    state: present
-    src: '{{ ansible_env.HOME }}/roles/collect/files/trusted_ca_certmap.yaml'
-
-- name: Get the trusted-ca-bundle
-  community.kubernetes.k8s_info:
-    api_version: v1
-    kind: ConfigMap
-    namespace: "{{ namespace }}"
-    name: "trusted-ca-bundle"
-  register: trusted_ca_bundle
-
-- name: Set the trusted-ca-bundle crt contents
-  set_fact:
-    trusted_ca_bundle_contents: "{{ trusted_ca_bundle.resources[0].data['ca-bundle.crt'] }}"
-  when: trusted_ca_bundle.resources
-
-- name: Write the trusted-ca-bundle contents to a file
-  copy: content="{{ trusted_ca_bundle_contents }}" dest="{{ cacert_path }}"
-  when: trusted_ca_bundle_contents is defined
-
-- name: Fail the trusted ca certificate could not be found and certificate validation is enabled
-  fail:
-    msg: 'The trusted ca certificate could not be found and certificate validation is enabled.'
-  when: trusted_ca_bundle_contents is not defined
-
-- name: Set download request facts
-  set_fact:
-    collect_file_prefix: '{{ collect_manifest_uuid }}'
-    format: "&format={{ collect_format }}"
-    namespace: "&namespace={{ namespace }}"
-
-# getting a little clever to build lists to append into
-- name: initialize fact lists
-  set_fact:
-    api_params: []
-    api_urls: []
-    csv_files: []
-
-# this appends the string inside the brackets to the 'api_params' list.
-- name: compile URL query params, append to param list
-  set_fact:
-    api_params: "{{ api_params + ['?name='+item+current_year_month+format+namespace] }}"
-  with_items: "{{ collect_reports }}"
-
-# this appends the string inside the brackets to the 'api_urls' list.
-- name: assemble compiled URL facts, append to list.
-  set_fact:
-    api_urls: "{{ api_urls + [metering_api_route+item] }}"
-  with_items: "{{ api_params }}"
-
-- name: Set download_path
-  set_fact:
-    collect_cluster_download_path: '{{ collect_download_path }}/{{  ocp_cluster_id }}'
-
-- name: Remove temp files
-  file:
-    path: '{{ collect_cluster_download_path }}'
-    state: absent
-  when: collect_delete_after | bool
-
-- name: Create temp dir for downloaded files
-  file:
-    path: '{{ collect_cluster_download_path }}'
-    state: directory
-    mode: 0777
-
-- name: Download OCP report from endpoint
-  get_url:
-    url: '{{ item }}'
-    headers:
-      Authorization: "Bearer {{ reporting_operator_token }}"
-    dest: '{{ collect_cluster_download_path }}/{{ collect_file_prefix }}_openshift_usage_report.{{ idx }}.{{ collect_format }}'
-    validate_certs: '{{ ocp_validate_cert | bool }}'
-    timeout: '{{ collect_ocp_report_timeout }}'
-  with_items: "{{ api_urls }}"
-  loop_control:
-    index_var: idx
-  register: download_result
-
-- name: debug download result
-  debug:
-    var: download_result
-  when: debug
-
-- name: append filename to fact list
-  set_fact:
-    csv_files: "{{ csv_files + [item.dest | basename] }}"
-  with_items: "{{ download_result.results }}"
-
-- name: debug csv_files
-  debug:
-    var: csv_files
-  when: debug
-
-- name: Check that required files exist
-  stat:
-    path: '{{ collect_cluster_download_path + "/" + item }}'
-  register: csv_stat_result
-  with_items:
-    - '{{ csv_files }}'
-
-- name: debug the csv_stat_result
-  debug:
-    var: csv_stat_result
-  when: debug
-
-- name: Check for empty download results
-  fail:
-    msg: 'Downloaded file {{ item }} has no content or could not be found: {{ item.stat }}.'
-  when: not item.stat.exists or (item.stat.exists and item.stat.size <= 0)
-  with_items:
-    - '{{ csv_stat_result.results }}'
-
-- name: Run packaging script to prepare reports for sending to Insights
-  script: package_report.py --filepath {{ ocp_cluster_id }} --max-size {{ collect_max_csvfile_size }} --ocp-cluster-id {{ ocp_cluster_id }} --overwrite
-  args:
-    chdir: '{{ collect_download_path }}'
-  register: packaged_reports
-
-- name: Wait time before upload in seconds
-  debug:
-    var: collect_upload_wait
-
-- name: Wait before upload to space out metric delivery
-  wait_for:
-    timeout: '{{ collect_upload_wait }}'
-  delegate_to: localhost
-
-- name: Upload the cost report to ingress using basic auth
-  shell:
-    cmd: 'curl -vvvv -F "file=@{{ item }};type=application/vnd.redhat.hccm.tar+tgz" {{ ingress_url }} -u {{ username }}:{{ password }} --cacert {{ cacert_path }}'
-    chdir: '{{ collect_download_path }}'
-  with_items:
-    - '{{ packaged_reports.stdout_lines }}'
-  when: authentication == 'basic'
-
-- name: Upload the cost report to ingress using token auth
-  shell:
-    cmd: 'curl -vvvv -F "file=@{{ item }};type=application/vnd.redhat.hccm.tar+tgz" {{ ingress_url }} -H "Authorization: Bearer {{ authentication_token }}" -H "User-Agent: cost-mgmt-operator/{{ source_commit }} cluster/{{ ocp_cluster_id }}" --cacert {{ cacert_path }}'
-    chdir: '{{ collect_download_path }}'
-  with_items:
-    - '{{ packaged_reports.stdout_lines }}'
-  when: authentication == 'token'
-
-- name: Remove upload files
-  file:
-    path: '{{ collect_download_path }}/{{ item }}'
-    state: absent
-  with_items:
-    - '{{ packaged_reports.stdout_lines }}'
-  when: collect_delete_after | bool
diff --git a/roles/setup/tasks/collect.yml b/roles/setup/tasks/collect.yml
index 9527737..3432324 100644
--- a/roles/setup/tasks/collect.yml
+++ b/roles/setup/tasks/collect.yml
@@ -40,13 +40,13 @@
 
 - name: Format current_month string if less than 10
   set_fact:
-    current_month: '{{ "0" + (current_month | string) }}'
+    current_month: '{{ "0" + (input_month | string) }}'
   when:
-    - (current_month | int)  < 10
+    - (input_month | int)  < 10
 
 - name: Set monthly suffix for reports
   set_fact:
-    current_year_month: '{{ (current_year | string )  + (current_month | string) }}'
+    current_year_month: '{{ (input_year | string )  + (input_month | string) }}'
 
 - name: Obtain metering api info
   community.kubernetes.k8s_info:
@@ -177,7 +177,7 @@
   community.kubernetes.k8s:
     namespace: "{{ namespace }}"
     state: present
-    src: '{{ ansible_env.HOME }}/roles/collect/files/trusted_ca_certmap.yaml'
+    src: '{{ ansible_env.HOME }}/roles/setup/files/trusted_ca_certmap.yaml'
 
 - name: Get the trusted-ca-bundle
   community.kubernetes.k8s_info:
diff --git a/roles/setup/tasks/main.yml b/roles/setup/tasks/main.yml
index 7325093..db3d010 100644
--- a/roles/setup/tasks/main.yml
+++ b/roles/setup/tasks/main.yml
@@ -97,7 +97,7 @@
 - name: Ensure metering has been configured
   fail:
     msg: 'Metering has not been configured.'
-  when: not metering_objects.resources
+  when: not metering_objects.resources or (metering_objects.resources is defined and metering_objects.resources | length == 0)
 
 - name: Set upload_cycle_seconds
   set_fact:
@@ -167,8 +167,8 @@
 - name: Upload metric data
   import_tasks: collect.yml
   vars:
-    current_month: '{{ current_month }}'
-    current_year: '{{ current_year }}'
+    input_month: '{{ current_month }}'
+    input_year: '{{ current_year }}'
     namespace: "{{ meta.namespace }}"
     current_cr: "{{ current_cr }}"
     current_cr_spec: "{{ current_cr_spec }}"