GitHub Actions – Job to Copy a Workflow From One Repository to Another

This is a continuation of what I posted the other day.

In this exercise – I needed to be able to automatically copy workflow files from one repository to another.

There is a known reported issue that, despite adding a GitHub Actions workflow file, you can’t use the workflow_dispatch popup in the UI to manually trigger a build (if needed) unless the file is on the main/master branch.

So I did some investigation and created an automation to copy a workflow to the main/master branch.

As a result, I studied the GitHub API and wrote up the below snippet – it does the following:

  • Disables Branch Protections for the Target Repository and Branch
  • Checks if the Workflow exists on the Target Repository
  • Copies the Workflow from the Source Repository to the Target – creating it if it doesn’t exist, or updating it if it does and “Update if Exists” is true
    • Leaving “Update if Exists” as false is useful for bulk updates across many repositories where you don’t want to overwrite an Actions build that has already been created on the Target
  • Re-enables Branch Protections for the Target Repository and Branch

File Name – .github/workflows/copy_workflow.yaml

name: Copy Workflow from Source to Target Repository

on:
  workflow_dispatch:
    inputs:
      owner:
        description: 'Source/Target Organization'
        required: true
        default: 'my-organization-name'
      target_repo:
        description: 'Target Repository (Place to Copy Workflow File TO)'
        required: true
        default: 'ExampleRepositoryName'
      target_branch:
        description: 'Target Branch (Place to Copy Workflow File TO)'
        required: true
        default: 'main'
      source_repo:
        description: 'Source Repository (Place to Copy Workflow File FROM)'
        required: true
        default: 'ExampleRepositoryName'
      source_branch:
        description: 'Source Branch (Place to Copy Workflow File FROM)'
        required: true
        default: 'master'
      workflow_file:
        description: 'Workflow File to Copy'
        required: true
        default: 'build.yaml'
      update_if_exists:
        description: 'Update Workflow File if it already exists in the target repository'
        required: true
        default: 'false'

jobs:
  copy_workflow:
    name: Copy Workflow from Source to Target Repository
    runs-on: ubuntu-latest
    steps:
        - name: Clone Dependencies - my-actions-repository
          uses: actions/checkout@v3
          with:
              token: ${{secrets.GIT_TOKEN}}
              repository: my-organization-name/my-actions-repository
              ref: main
              path: ${{github.workspace}}/my-actions-repository

        - name: Copy Workflow from Source to Target Repository
          shell: bash
          run: |
            chmod 700 ${{github.workspace}}/my-actions-repository/utility/copy-workflow/copy-workflow.sh
            ${{github.workspace}}/my-actions-repository/utility/copy-workflow/copy-workflow.sh "${{inputs.owner}}" "${{inputs.target_repo}}" "${{inputs.target_branch}}" "${{inputs.source_repo}}" "${{inputs.source_branch}}" "${{inputs.workflow_file}}" "${{inputs.update_if_exists}}"
          env:
            GITHUB_TOKEN: ${{ secrets.GIT_TOKEN }}
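
Side note – since the trigger is workflow_dispatch, the workflow can also be kicked off from the CLI rather than the UI popup. A rough sketch, assuming gh is authenticated and pointed at whichever repository hosts copy_workflow.yaml (the input values below are just the defaults from above):

gh workflow run copy_workflow.yaml \
  -f owner=my-organization-name \
  -f target_repo=ExampleRepositoryName \
  -f target_branch=main \
  -f source_repo=ExampleRepositoryName \
  -f source_branch=master \
  -f workflow_file=build.yaml \
  -f update_if_exists=true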

File Name – utility/copy-workflow/copy-workflow.sh

#!/bin/bash

# Get command-line arguments
owner="$1"
target_repo="$2"
target_branch="$3"
source_repo="$4"
source_branch="$5"
workflow_file="$6"
update_if_exists="$7"

echo "Disabling Branch Protection Rules for $target_branch on $target_repo"

gh api repos/$owner/$target_repo/branches/$target_branch/protection \
  -H "Accept: application/vnd.github.v3+json" \
  -X DELETE || echo "Branch protection rules already disabled or don't exist"

echo "Copying workflow: $workflow_file from '$source_repo' ($source_branch) to '$target_repo' ($target_branch)"

# Check if the source repository exists
echo "Checking source repository: $source_repo"
check_source_repo=$(gh api -X GET "/repos/$owner/$source_repo" -H 'Accept: application/vnd.github.v3+json' 2>&1)
if [[ $? -ne 0 ]]; then
  echo "Error: Failed to retrieve information for source repository: $source_repo"
  echo "$check_source_repo"
  exit 1
fi

# Check if the target repository exists
echo "Checking target repository: $target_repo"
check_target_repo=$(gh api -X GET "/repos/$owner/$target_repo" -H 'Accept: application/vnd.github.v3+json' 2>&1)
if [[ $? -ne 0 ]]; then
  echo "Error: Failed to retrieve information for target repository: $target_repo"
  echo "$check_target_repo"
  exit 1
fi

# Retrieve the workflow file from the source repository
echo "Retrieving workflow file: $workflow_file"
workflow_file_info=$(gh api -X GET "/repos/$owner/$source_repo/contents/.github/workflows/$workflow_file" -H 'Accept: application/vnd.github.v3+json' 2>&1)
if [[ $? -ne 0 ]]; then
  echo "Failed to retrieve workflow file: $workflow_file"
  echo "$workflow_file_info"
  exit 1
fi

# The contents API returns the file base64-encoded, which is the same encoding the PUT call below expects
workflow_content=$(echo "$workflow_file_info" | jq -r '.content')
workflow_sha=$(echo "$workflow_file_info" | jq -r '.sha')

# Check if the workflow file already exists in the target repository
echo "Checking if workflow file exists in target repository: $workflow_file"
existing_workflow=$(gh api -X GET "/repos/$owner/$target_repo/contents/.github/workflows/$workflow_file" -H 'Accept: application/vnd.github.v3+json' 2>&1)
exists=$?
if [[ $exists -eq 0 ]]; then
  existing_workflow_sha=$(echo "$existing_workflow" | jq -r '.sha')
  echo "Workflow file exists in the target repository. Grabbing the Sha: $existing_workflow_sha"
else
  echo "Workflow file does not exist in the target repository."
fi

# Skip - the workflow already exists and "Update if Exists" is false
if [[ $exists -eq 0 && "$update_if_exists" != "true" ]]; then
  echo "Skipping Update of Existing Workflow: $workflow_file"
fi

# Update - the workflow already exists and "Update if Exists" is true
copy_status=0
if [[ $exists -eq 0 && "$update_if_exists" = "true" ]]; then
  echo "Updating workflow in target repository: $workflow_file"
  update_workflow=$(gh api -X PUT "/repos/$owner/$target_repo/contents/.github/workflows/$workflow_file" \
    -H 'Accept: application/vnd.github.v3+json' \
    -f branch="$target_branch" \
    -f content="$workflow_content" \
    -f message="Update workflow: $workflow_file" \
    -f sha="$existing_workflow_sha" 2>&1)
  copy_status=$?
fi

# Create - the workflow does not exist in the target repository yet
if [[ $exists -ne 0 ]]; then
  echo "Creating workflow in target repository: $workflow_file"
  create_workflow=$(gh api -X PUT "/repos/$owner/$target_repo/contents/.github/workflows/$workflow_file" \
    -H 'Accept: application/vnd.github.v3+json' \
    -f branch="$target_branch" \
    -f content="$workflow_content" \
    -f message="Copy workflow: $workflow_file" 2>&1)
  copy_status=$?
fi

# Check the captured status rather than $?, which at this point only reflects the last [[ ]] test
if [[ $copy_status -ne 0 ]]; then
  echo "Failed to copy workflow: $workflow_file"
  if [[ $exists -eq 0 && "$update_if_exists" = "true" ]]; then
    echo "$update_workflow"
  else
    echo "$create_workflow"
  fi
  exit 1
fi

# Notify the user that the workflow has been copied
echo "Workflow Logic Completed: $workflow_file from '$source_repo' ($source_branch) to '$target_repo' ($target_branch)."

echo "Enabling Branch Protection Rules for $target_branch on $target_repo"

PAYLOAD='{
    "required_status_checks": null,
    "enforce_admins": true,
    "required_pull_request_reviews": {
        "dismissal_restrictions": {
            "users": [],
            "teams": ["my_team_name"]
        },
        "dismiss_stale_reviews": false,
        "require_code_owner_reviews": false,
        "required_approving_review_count": 1,
        "require_last_push_approval": false,
        "bypass_pull_request_allowances": {
            "users": ["service_account_username"],
            "teams": []
        }
    },
    "restrictions": {
        "users": [],
        "teams": ["my_team_name"],
        "apps": []
    },
    "required_linear_history": false,
    "allow_force_pushes": false,
    "allow_deletions": false,
    "block_creations": true,
    "required_conversation_resolution": true,
    "lock_branch": false,
    "allow_fork_syncing": true
}'

echo "$PAYLOAD" | gh api repos/$owner/$target_repo/branches/$target_branch/protection \
  -H "Accept: application/vnd.github.v3+json" \
  -X PUT \
  --silent \
  --input - || (echo "Failed to enable branch protection rules for $target_branch on $target_repo" && exit 1)
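
For what it’s worth, the script also runs fine locally outside of Actions – a quick sketch, assuming gh is already authenticated (gh auth login, or GITHUB_TOKEN/GH_TOKEN exported) with a token that has admin rights on the repositories involved:

chmod +x copy-workflow.sh
./copy-workflow.sh "my-organization-name" "ExampleRepositoryName" "main" "ExampleRepositoryName" "master" "build.yaml" "false"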

GitHub CLI – Programmatically Disabling and Enabling Branch Protection Rules

Today I needed to be able to automatically disable and enable branch protection rules on a GitHub repository so that an Actions Build Template could be added to the main/master branch.

There is a known reported issue that, despite adding a GitHub Actions workflow file, you can’t use the workflow_dispatch popup in the UI to manually trigger a build (if needed) unless the file is on the main/master branch.

So I did some investigation and created an automation to copy a workflow to the main/master branch.

In the process I uncovered that some repositories weren’t set up properly with their branch protection rules, which blocked the automation.

As a result, I studied the GitHub API and wrote up the below snippet to allow temporarily disabling and re-enabling branch protection.

#!/bin/bash

# Get command-line arguments
owner="$1"
target_repo="$2"
target_branch="$3"

echo "Disabling Branch Protection Rules for $target_branch on $target_repo"

gh api repos/$owner/$target_repo/branches/$target_branch/protection \
  -H "Accept: application/vnd.github.v3+json" \
  -X DELETE || echo "Branch protection rules already disabled or don't exist"

echo "Enabling Branch Protection Rules for $target_branch on $target_repo"

PAYLOAD='{
    "required_status_checks": null,
    "enforce_admins": true,
    "required_pull_request_reviews": {
        "dismissal_restrictions": {
            "users": [],
            "teams": ["my_team_name"]
        },
        "dismiss_stale_reviews": false,
        "require_code_owner_reviews": false,
        "required_approving_review_count": 1,
        "require_last_push_approval": false,
        "bypass_pull_request_allowances": {
            "users": ["my_service_account_user_id"],
            "teams": []
        }
    },
    "restrictions": {
        "users": [],
        "teams": ["my_team_name"],
        "apps": []
    },
    "required_linear_history": false,
    "allow_force_pushes": false,
    "allow_deletions": false,
    "block_creations": true,
    "required_conversation_resolution": true,
    "lock_branch": false,
    "allow_fork_syncing": true
}'

echo "$PAYLOAD" | gh api repos/$owner/$target_repo/branches/$target_branch/protection \
  -H "Accept: application/vnd.github.v3+json" \
  -X PUT \
  --silent \
  --input - || (echo "Failed to enable branch protection rules for $target_branch on $target_repo" && exit 1)
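
Saved as a standalone script – toggle-branch-protection.sh is just a placeholder name I’m using here – it can be run ad-hoc the same way, assuming gh is authenticated as someone with admin rights on the repository:

chmod +x toggle-branch-protection.sh
./toggle-branch-protection.sh "my-organization-name" "ExampleRepositoryName" "main"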

Installing the Latest Version of Curl

I came across a bizarre issue yesterday with Ubuntu WSL on my development machine.

curl: (35) OpenSSL/3.0.8: error:0A000152:SSL routines::unsafe legacy renegotiation disabled

What made it bizarre is I’d been using regular curl commands for months and seemingly overnight I began getting these errors.

It turned out that policies had been updated internally for the latest OpenSSL versions, which – if you don’t have the latest version of curl – causes failures until the package is updated.

So – this morning – I spent some time writing up an install script to upgrade curl to the latest version on Ubuntu.

This should work on most Linux distributions – just swap the apt commands for yum (or your distribution’s package manager).

git clone https://github.com/curl/curl.git
cd curl
sudo apt install -y autoconf automake libtool make build-essential libssl-dev   # compiler + OpenSSL headers needed for --with-openssl
sudo apt remove -y curl
sudo apt purge -y curl
autoreconf -fi
./configure --with-openssl
sudo make
sudo make install
sudo cp /usr/local/bin/curl /usr/bin/curl
sudo ldconfig

Then – to set a temporary OpenSSL configuration to fix the original error – do the following…

echo "openssl_conf = openssl_init

[openssl_init]
ssl_conf = ssl_sect

[ssl_sect]
system_default = system_default_sect

[system_default_sect]
Options = UnsafeLegacyRenegotiation" > $HOME/.openssl.cnf

Then, in the .bash_profile for your user, add the following line…

export OPENSSL_CONF="$HOME/.openssl.cnf"
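
To sanity-check the result afterwards, something along these lines should show the freshly built curl and the OpenSSL it was linked against (the exact version strings will vary):

source ~/.bash_profile       # pick up OPENSSL_CONF
which curl                   # should now resolve to the copied /usr/bin/curl build
curl -V                      # version line plus the linked OpenSSL
curl -sI https://www.google.com | head -n 1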

MariaDB – Creating an Event and Procedure

Taking some time today to share a simple example of how to create an Event and a Stored Procedure in MariaDB.

Use Case / Benefits:

  • Executes a Stored Procedure on a set schedule (like a cron job)
  • Using a Stored Procedure in conjunction with an Event lets you change the code behind the procedure without having to recreate the Event when something needs to be added

Example – Stored Procedure:

DROP PROCEDURE IF EXISTS `MariaDB-SchemaName`.`MY_MARIADB_PROCEDURENAME`;

-- This needs to be commented out unless testing locally in MySQL Workbench or equivalent
-- DELIMITER //
CREATE PROCEDURE `MariaDB-SchemaName`.`MY_MARIADB_PROCEDURENAME`(IN exampleBoolean BOOLEAN) 
BEGIN
    SELECT 'EXAMPLE QUERY' as example;
END
-- This needs to be commented out unless testing locally in MySQL Workbench or equivalent
-- //
-- DELIMITER ;

-- This needs to be commented out unless testing locally in MySQL Workbench or equivalent
-- CALL `MariaDB-SchemaName`.`MY_MARIADB_PROCEDURENAME`(FALSE); 

Example – Event:

-- See what timezone the MySQL/MariaDB Server is using
-- SELECT @@system_time_zone;

-- Production is using UTC 

-- See if event_scheduler shows up as a user
-- SHOW PROCESSLIST;

-- Turn it on if it's not enabled
-- SET GLOBAL event_scheduler = ON;

-- Show that it's now enabled
-- SHOW PROCESSLIST;

-- Get Today's Date and the Desired Refresh Time
SET @refresh_date=DATE_FORMAT(NOW(), '%Y-%m-%d');
SET @refresh_time='11:30:00';

-- Create Variable representing the Refresh Time
SET @refresh=CONCAT(@refresh_date, ' ', @refresh_time);
select @refresh;

-- Get a time in EST converted to UTC
SET @refresh_utc=convert_tz(@refresh,'US/Eastern','UTC');
select @refresh_utc;

-- Drop Event if it Exists Already
DROP EVENT IF EXISTS `MariaDB-SchemaName`.MY_MARIADB_EVENT_NAME;

-- Create Event and Execute Every 24 Hours
CREATE EVENT `MariaDB-SchemaName`.MY_MARIADB_EVENT_NAME
ON SCHEDULE EVERY 24 HOUR
STARTS @refresh_utc
ON COMPLETION PRESERVE
DO CALL `MariaDB-SchemaName`.`MY_MARIADB_PROCEDURENAME`(TRUE);

-- Enable Event
ALTER EVENT `MariaDB-SchemaName`.MY_MARIADB_EVENT_NAME ENABLE;

-- Show the Events
SHOW EVENTS FROM `MariaDB-SchemaName`;

PowerShell Script – Fixing DNS Resolution in WSL Ubuntu While Connected to AnyConnect VPN

Had this odd issue over the past week that I’d been trying to figure out: once connected to the company VPN via Cisco AnyConnect, DNS resolution inside WSL Ubuntu stopped working.

This annoyed me enough that I wrote up a PowerShell Script to automatically handle the mode switching when on and off VPN.

Symptoms:

  • Curl didn’t work
  • Ping didn’t work

There were loads of solutions online, but this one worked the best for me and required no modification of the Windows network settings. I don’t have administrative rights on my machine and wanted an option that didn’t require getting help from IT.

Automatic Script to Enable AnyConnect Mode for WSL Ubuntu:

  1. Create “enableAnyConnect.ps1” somewhere on computer
  2. Add the following code to the file
  3. Execute the file via PowerShell window using ./enableAnyConnect.ps1

wsl -d ubuntu bash -c "cat /etc/resolv.conf &&\
                       sudo cp /etc/resolv.conf /etc/resolv.conf.bak &&\
                       sudo rm -f /etc/wsl.conf &&\
                       echo '[network]' | sudo tee /etc/wsl.conf &&\
                       echo 'generateResolvConf = false' | sudo tee -a /etc/wsl.conf"

wsl --terminate ubuntu

wsl -d ubuntu bash -c "sudo cp --remove-destination /etc/resolv.conf.bak /etc/resolv.conf &&\
                       sudo sed -i '/nameserver/s/^/#/' /etc/resolv.conf"


$ciscoAnyconnectAdapter = Get-NetAdapter | Where-Object {$_.InterfaceDescription -Match "Cisco AnyConnect"}
$output = Get-DnsClientServerAddress -AddressFamily IPv4 | Where-Object {$_.InterfaceAlias -Match $ciscoAnyconnectAdapter.InterfaceAlias}

foreach($serverAddress in $output.ServerAddresses)
{
    wsl -d ubuntu bash -c "echo 'nameserver $serverAddress' | sudo tee -a /etc/resolv.conf"
}

wsl -d ubuntu bash -c "curl https://www.google.com"

Automatic Script to Disable AnyConnect Mode for WSL Ubuntu:

  1. Create “disableAnyConnect.ps1” somewhere on computer
  2. Add the following code to the file
  3. Execute the file via PowerShell window using ./disableAnyConnect.ps1

wsl -d ubuntu bash -c "sudo rm -f /etc/wsl.conf"

wsl --terminate ubuntu

wsl -d ubuntu bash -c "curl https://www.google.com"

Drawbacks

  • If the IP Address for the DNS server changes (after a reboot or having to reauthenticate with AnyConnect) you’ll need to re-run these scripts
    • My personal steps are to execute the disable script first then execute the enable script after

Meeting Reminder Blackout – Now in Powershell!

If you follow this blog you may remember a post a long while back about my “Meeting Reminder Ball” that furiously bounces around the screen – and its sequel, a “Meeting Reminder Blackout” that gets rid of the ball concept and displays a reminder blackout on all screens.

Well, I took a new job recently and have stepped back into a position where I have limited administrative permissions on my laptop – which posed the challenge that I couldn’t get Visual Studio Community installed without jumping through some hoops that would take a lot of time.

As a result, I took the code I originally wrote for “Meeting Reminder Blackout” and converted its C# / .NET to the PowerShell equivalents (I even managed to add extra features to allow for custom messages that are no longer hard-coded).

If you’d like to give it a try, check out my GitHub Repository at the following location:

Enabling Hyper-V Services for Ubuntu to Allow for Native SSH over Windows 10/11 Hyper-V Sockets

Today I needed to connect to my Ubuntu VM on Hyper-V without having to specify its randomly generated IP address, and to keep using it for development while Cisco AnyConnect was redirecting all of my computer’s traffic.

I happened across a pretty great command called “hvc.exe” that can be used in PowerShell and lets you interact with a Virtual Machine over native Hyper-V sockets – thus eliminating the need for IP addresses.

The resulting command I came up with connects over these native sockets and binds the VM’s SSH port to localhost at port 2222 – which is not hijacked by Cisco AnyConnect – so that was a win there!

  • hvc.exe ssh -L 2222:localhost:22 cody@UbuntuVM
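
With that session holding the tunnel open, anything that speaks SSH can reach the VM through localhost – for example (reusing the same user from the command above):

# From another terminal, while the hvc.exe session above is holding the tunnel open:
ssh -p 2222 cody@UbuntuVM-via-localhost    # or simply: ssh -p 2222 cody@localhost
scp -P 2222 ./somefile cody@localhost:/home/cody/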

The only hiccup I had was that I needed to install the official virtualization packages for Hyper-V via Ubuntu using their package manager.

The resulting command sequence that worked out for me was:

  • sudo apt-get update
  • sudo apt-get install linux-image-virtual linux-tools-virtual linux-cloud-tools-virtual
  • sudo reboot

All in all – I was pleasantly surprised that Hyper-V now has this capability as it reduces the manual steps I have to work through if my computer ends up rebooting or otherwise.

Bash/Powershell – Killing an SSH Tunnel Running on a Specific Port

On Windows – I use Git Bash and my custom bash profile to create “quality of life” commands for various use cases.

Today I had need to be able to kill an SSH Tunnel Process running on a specific port.

What made this tricky is that I needed to be able to use PowerShell commands via Git Bash (which I know is excessive, but I like things the way I like them).

PROCESS_TO_KILL="ssh.exe"
PROCESS_TO_KILL_POWERSHELL_EXPRESSION="\"name = '$PROCESS_TO_KILL'\""
PORT_TO_KILL="1993"
PORT_TO_KILL_POWERSHELL_EXPRESSION="'*$PORT_TO_KILL*'"

echo "[INFO] Listing Active SSH Tunnels..."
powershell.exe -command "Get-WmiObject Win32_Process -Filter $PROCESS_TO_KILL_POWERSHELL_EXPRESSION"

echo "[INFO] Filtering down to $PORT_TO_KILL..."
powershell.exe -command "Get-WmiObject Win32_Process -Filter $PROCESS_TO_KILL_POWERSHELL_EXPRESSION | where {\$_.CommandLine -like $PORT_TO_KILL_POWERSHELL_EXPRESSION }"

echo "[INFO] Killing Found SSH Tunnels..."
powershell.exe -command "Get-WmiObject Win32_Process -Filter $PROCESS_TO_KILL_POWERSHELL_EXPRESSION | where {\$_.CommandLine -like $PORT_TO_KILL_POWERSHELL_EXPRESSION } | ForEach-Object { taskkill /f /pid \$_.ProcessId }"
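
Since the whole point of the bash profile is “quality of life” commands, the same logic can be collapsed into a function that takes the port as an argument – a sketch, with killtunnel being a made-up name:

# Hypothetical ~/.bash_profile helper - kills any ssh.exe process whose command line mentions the given port
killtunnel() {
    local port="${1:?Usage: killtunnel <port>}"
    powershell.exe -command "Get-WmiObject Win32_Process -Filter \"name = 'ssh.exe'\" | where {\$_.CommandLine -like '*$port*' } | ForEach-Object { taskkill /f /pid \$_.ProcessId }"
}

# Example: kill whatever tunnel is bound to 1993
killtunnel 1993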

Windows Server – File Cleanup Script for Task Scheduler – Delete Files Older Than X Days

Every time I’ve written this script I’ve never saved it, so I end up googling it again to tweak the behavior to the way I want.

So today I’m writing it down permanently!

Side Note – It’s funny how it always ends up being on some piece of legacy infrastructure I’ve inherited running Windows Server 2012 or 2008 no matter what job position I’ve been in.

This recurses into the subdirectories and deletes files older than 60 days.

It also outputs what was deleted to the specified log file.

What I did was create this as “cleanup.bat” and then created a Task Scheduler entry to call the batch file.

ForFiles /p "D:\MyCoolDirectory" /s /d -60 /c "cmd /c echo Deleting @file >> D:\deletion_output.log 2>&1"
ForFiles /p "D:\MyCoolDirectory" /s /d -60 /c "cmd /c del /q @file >> D:\deletion_output.log 2>&1"
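
For completeness, the Task Scheduler entry itself can also be created from the command line – a rough example with a made-up task name and a 2 AM daily schedule:

schtasks /Create /TN "Nightly File Cleanup" /TR "D:\cleanup.bat" /SC DAILY /ST 02:00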

SQL and PHP – Crafting a Programmatic Query that Safely Binds an Array for usage with an IN statement

One of the more useful parts of SQL is the usage of the IN statement in the WHERE clause to create a query that is easy to read and execute.

Unfortunately, with prepared statements you can’t bind an array to an IN statement ahead of time when the size of the data structure (array) is undetermined at execution time – each value has to be bound as its own placeholder.

So what I did today in PHP uses essentially the same approach to accomplish this as I’ve done previously in Java/C# – details below.

<?php

class ExampleClass
{
	
    /**
     * Get a list of messages that have not been processed yet optionally passing in values we want to ignore
     *
     * @throws Exception
     * @return array
     */
    public function getMessagesToProcess(?array $idValuesToIgnore) : array
    {
        // Optionally we can provide id values to ignore so we don't get them again.
        //
        // If there are no values in the array then a blank space is the only thing added to the where clause.
        $sqlInStatement = "";

        // PDO prepared statements don't natively support binding an array to an IN clause, so we have to
        // build the placeholders manually - but in a way that doesn't expose us to injection attacks.
        // 
        // Reference:
        // - https://stackoverflow.com/questions/37209686/doctrine-how-to-bind-array-to-the-sql
        if(!empty($idValuesToIgnore) && is_array($idValuesToIgnore) && count($idValuesToIgnore) != 0)
        {
            $sqlInStatementPrefix = "AND id NOT IN (";
            $sqlInStatementSuffix = ")";
            $sqlInStatementContent = null;
            
            $currentPosition = 1;
            $totalParametersToGenerate = count($idValuesToIgnore);

            while($currentPosition <= $totalParametersToGenerate)
            {
                $statementToAppend = ":id_" . $currentPosition;

                if(empty($sqlInStatementContent))
                {
                    $sqlInStatementContent = $statementToAppend;
                }
                else
                {
                    $sqlInStatementContent = "$sqlInStatementContent, $statementToAppend";
                }

                $currentPosition++;
            }

            $sqlInStatement = $sqlInStatementPrefix . $sqlInStatementContent . $sqlInStatementSuffix;
        }

        $sql = "SELECT
                    `id`
                FROM `Schema`.`Table`
                WHERE
                    `Table`.`processed_at` IS NULL
                    $sqlInStatement";

        $query = $this->db->prepare($sql);
        
        // Bind the values if there were any passed in as part of the constructed IN SQL earlier in the function
        if(!empty($idValuesToIgnore) && is_array($idValuesToIgnore) && count($idValuesToIgnore) != 0)
        {
            $currentPosition = 1;
        
            foreach($idValuesToIgnore as $id)
            {
                $parameterToBind = ":id_" . $currentPosition;
                $query->bindValue($parameterToBind, $id, PDO::PARAM_INT);
                $currentPosition++;
            }
        }
        
        try {
            $query->execute();

            $rowCount = $query->rowCount();

            if($rowCount != 0) {
                return $query->fetchAll();
            }
        } catch (\Exception|\Throwable $e) {
            $this->logger->error($e);
        }

        return [];
    }
}
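
And a rough usage sketch – the constructor isn’t shown in the class above, so assume $pdo and $logger get injected into $this->db and $this->logger however your project wires up dependencies:

<?php

// Hypothetical wiring - a PDO connection and a logger injected via the (unshown) constructor
$example = new ExampleClass($pdo, $logger);

// Skip ids that were already handled in a previous batch
$rows = $example->getMessagesToProcess([3, 5, 8]);

// Or pass null to fetch everything that hasn't been processed yet
$allRows = $example->getMessagesToProcess(null);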