Commit 61bdf28b authored by Powell, Eric's avatar Powell, Eric
Browse files

Added support for multiple (team / phase) estimates for a given project in...

Added support for multiple (team / phase) estimates for a given project in database design, loader script, and PowerBI views.

Also started framework to export data from views to CSV files to be imported into PowerBI as baseline for cost and schedule.
parent cd61994d
Loading
Loading
Loading
Loading
+9 −7
Original line number Diff line number Diff line
@@ -3,22 +3,24 @@ import psycopg2
from psycopg2 import sql

class LoadRes(ResEstimateData):
    def __init__(self, project_id, workorder_title, schema_name, resource_pool_name, filename, pg_conn):
        """Load a Resolution estimate file and register its workorder row.

        Args:
            project_id: PK of the owning project (ties estimates to a project).
            workorder_title: Title for the new workorders row.
            schema_name: Target PostgreSQL schema (e.g. 'fo_itsd_estimate').
            resource_pool_name: Description used to look up resource_pool_id.
            filename: Path of the estimate export; also stored for de-duplication.
            pg_conn: An open psycopg2 connection.
        """
        super().__init__(filename)
        self.pg_con = pg_conn
        # Resolve the resource pool's surrogate key from its description.
        self.resource_poolid = self._get_id(schema_name,
                                            "resource_pools",
                                            "resource_pool_id",
                                            "description", resource_pool_name)
        try:
            self.workorder_id = self._insert_record_and_get_pk(schema_name, "workorders",
                                                               {'title': workorder_title,
                                                                'resource_pool_id': self.resource_poolid,
                                                                'estimate_file_name': filename,
                                                                'project_id': project_id},
                                                               'workorder_id')
        except Exception as e:
            # 23505 = PostgreSQL unique_violation: this estimate was loaded before.
            # getattr() guards against non-psycopg2 exceptions that lack .pgcode.
            if getattr(e, 'pgcode', None) == '23505':
                print('WorkOrder / Team /Project combination or Filename exists')
                exit(-999)
            # Any other failure must not be swallowed: without a workorder_id
            # every later step would fail obscurely.
            raise

    def _get_id(self, schema_name, table_name, column, filter_column, value):

+15 −0
Original line number Diff line number Diff line
import csv
import psycopg2
from psycopg2 import sql

class ExportPowerBInputs:
    """Framework stub for exporting PowerBI input views to CSV files.

    Connects to the estimates database; the query/export logic is not yet
    implemented (see _get_Data).
    """

    def __init__(self, Project_title, export_path, **kwargs):
        """Store export settings and open the database connection.

        Args:
            Project_title: Title of the project whose views will be exported.
            export_path: Directory the CSV files will be written to.
            **kwargs: Connection parameters forwarded verbatim to psycopg2.connect.
        """
        # NOTE: attribute names kept as-is (non-PEP8) for caller compatibility.
        self.Project_title = Project_title
        self.export_path = export_path
        self.pg_con = psycopg2.connect(**kwargs)

    def _get_Data(self):
        # TODO: query the PowerBI views and return rows to export.
        ...

    def export(self):
        """Fetch the view data and write it to CSV (currently a stub)."""
        self._get_Data()

    # Backward-compatible alias: the method was originally misspelled 'expoer'.
    expoer = export
 No newline at end of file
+98 −0
Original line number Diff line number Diff line
import psycopg2
from psycopg2 import sql



class ProjectBaseline:
    """Create (or look up) a project row and expose its primary key.

    On construction, tries to insert the project; if it already exists
    (unique violation), falls back to fetching the existing project_id.
    """

    def __init__(self, schema_name, project_title, **kwargs):
        """Open a connection and ensure a project row exists.

        Args:
            schema_name: Target PostgreSQL schema.
            project_title: Value stored in projects.project_name.
            **kwargs: Connection parameters forwarded to psycopg2.connect.
        """
        self.pg_con = psycopg2.connect(**kwargs)
        try:
            # BUG FIX: the original passed the bare title string where
            # _insert_record_and_get_pk requires a column->value dict
            # (data.keys() on a str would raise AttributeError).
            # Also use lowercase 'projects' consistently: sql.Identifier
            # quotes the name, so "Projects" and "projects" would be
            # different tables in PostgreSQL.
            self.project_id = self._insert_record_and_get_pk(
                schema_name, 'projects',
                {'project_name': project_title},
                'project_id')
        except Exception as e:
            # 23505 = PostgreSQL unique_violation; getattr guards against
            # non-psycopg2 exceptions that have no .pgcode attribute.
            if getattr(e, 'pgcode', None) == '23505':
                print('Project Exists, using existing project_id')
                # BUG FIX: pk column was misspelled 'proiject_id'.
                self.project_id = self._get_id(schema_name, 'projects',
                                               'project_id', 'project_name',
                                               project_title)
            else:
                # Do not swallow unrelated failures.
                raise

    def _get_id(self, schema_name, table_name, column, filter_column, value):
        """Return `column` from the first row where filter_column = value.

        Raises whatever the database raises; also raises TypeError via the
        fetchone()[0] subscript if no row matches.
        """
        # Construct the SQL query safely to prevent SQL injection.
        # psycopg2.sql objects safely quote identifiers (schema/table/column).
        query = sql.SQL("SELECT {col} FROM {tbl} WHERE {key} = {val}").format(
            col=sql.Identifier(column),
            tbl=sql.SQL('.').join([
                sql.Identifier(schema_name),
                sql.Identifier(table_name)
            ]),
            key=sql.Identifier(filter_column),
            val=sql.Placeholder()  # For the data value
        )

        try:
            # 'with' ensures the cursor is closed automatically.
            with self.pg_con.cursor() as cursor:
                cursor.execute(query, (value,))
                return cursor.fetchone()[0]
        except Exception as e:
            # Surface the failing query for diagnosis, then re-raise.
            print(query, (value,))
            raise e

    def _insert_record_and_get_pk(self, schema_name, table_name, data, pk_column='id'):
        """Insert a record into a table and return the new primary key.

        Args:
            schema_name (str): Schema containing the target table.
            table_name (str): The name of the target table.
            data (dict): Keys are column names, values are the data to insert.
            pk_column (str): Name of the serial primary key column (default 'id').

        Returns:
            The primary key value of the newly inserted row.

        Raises:
            Re-raises any database error after rolling back.
        """
        new_id = None
        columns = data.keys()

        # Construct the SQL query safely to prevent SQL injection.
        # psycopg2.sql objects safely quote identifiers (schema/table/column).
        query = sql.SQL("INSERT INTO {tbl} ({cols}) VALUES ({vals}) RETURNING {pk}").format(
            tbl=sql.SQL('.').join([
                sql.Identifier(schema_name),
                sql.Identifier(table_name)
            ]),
            cols=sql.SQL(', ').join(map(sql.Identifier, columns)),
            vals=sql.SQL(', ').join(sql.Placeholder() * len(columns)),
            pk=sql.Identifier(pk_column)
        )

        try:
            # 'with' ensures the cursor is closed automatically.
            with self.pg_con.cursor() as cursor:
                cursor.execute(query, list(data.values()))

                # RETURNING gives us the generated primary key.
                new_id = cursor.fetchone()[0]

                # Commit to make the insert permanent.
                self.pg_con.commit()
                print(f"✅ Record inserted successfully into '{table_name}'. New ID: {new_id}")
        except (Exception, psycopg2.Error) as error:
            print(f"❌ Error inserting record: {error}")
            # Roll back so the connection stays usable for the caller's
            # duplicate-handling path.
            if self.pg_con:
                self.pg_con.rollback()
            raise error
        # TO DO - On insert fail, return the Record ID of the existing record

        return new_id

    def get_project_id(self):
        """Return the project_id resolved in __init__."""
        return self.project_id
 No newline at end of file
+7 −4
Original line number Diff line number Diff line
import psycopg2
from ..libraries.db_loader import LoadRes

from ..libraries.res_project_loader import ProjectBaseline

# Press the green button in the gutter to run the script.
if __name__ == '__main__':
    # Use project title to allow us to tie this to the information in the Feature Weighting Table
    project_title = 'COREII'
    # Use project title to allow us to tie this to the information in the Feature Weighting Tabl
    workorder_title = 'COREII'
    resource_pool_name = 'ITSD'
    # The name of the Resolution Export file
    res_filename = r'/mnt/c/Users/uvp/Downloads/estimate (8).xlsx'
    # Connect to the database holding the estimate information
    pg_connect_string = (F"host='hgis-prj-mgmt', port='5438', dbname='proj_status', "
                         F"user='postgres',password='postgres'")
    schema_name = 'fo_itsd_estimate'
    # NOTE: Replace with your actual database connection details
    DB_PARAMS = {
        "dbname": "proj_status",
@@ -21,6 +22,8 @@ if __name__ == '__main__':
        "port": "5438"
    }
    # Load the information.
    # BUG FIX: ProjectBaseline() was called with no arguments, but its
    # __init__ requires schema_name, project_title and connection kwargs.
    obj_project = ProjectBaseline(schema_name, project_title, **DB_PARAMS)
    project_id = obj_project.get_project_id()
    pg_conn = psycopg2.connect(**DB_PARAMS)
    # Load the Resolution estimate under the resolved project.
    objLoadRes = LoadRes(project_id, workorder_title, schema_name,
                         resource_pool_name, res_filename, pg_conn)
    objLoadRes.load()