diff --git a/examples/dfcx_agent_cicd/cicd_code/UAT/__init__.py b/examples/dfcx_agent_cicd/cicd_code/UAT/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/examples/dfcx_agent_cicd/cicd_code/UAT/cloudbuild_deploy.yaml b/examples/dfcx_agent_cicd/cicd_code/UAT/cloudbuild_deploy.yaml new file mode 100644 index 00000000..ad5621dd --- /dev/null +++ b/examples/dfcx_agent_cicd/cicd_code/UAT/cloudbuild_deploy.yaml @@ -0,0 +1,49 @@ +steps: + + - id: SHAGCSCopy + name: gcr.io/google.com/cloudsdktool/cloud-sdk + #dir: 'set your path till the readme doc in the git' + entrypoint: /bin/bash + args: + - '-c' + - | + chmod 777 UAT/gcssha.sh + UAT/gcssha.sh $COMMIT_SHA + + - id: deployagent + name: 'python:3.10' + #dir: 'set your path till the readme doc in the git' + entrypoint: /bin/bash + args: + - -c + - | + pip3 install -r UAT/requirements.txt + python3 -m UAT.deploy $COMMIT_SHA + echo $? + + - id: CheckExitCode + name: 'gcr.io/cloud-builders/gcloud' + #dir: 'set your path till the readme doc in the git' + entrypoint: 'bash' + args: + - '-c' + - | + if [[ "$$BUILD_STATUS" -ne 0 ]]; then + echo "Stopping the build due to a previous failure." + exit 1 + fi + + + - id: triggerproddeploy + name: gcr.io/google.com/cloudsdktool/cloud-sdk + #dir: 'set your path till the readme doc in the git' + entrypoint: /bin/bash + args: + - '-c' + - | + chmod 777 UAT/trigger.sh + UAT/trigger.sh $LOCATION $COMMIT_SHA + + +options: + logging: CLOUD_LOGGING_ONLY \ No newline at end of file diff --git a/examples/dfcx_agent_cicd/cicd_code/UAT/deploy.py b/examples/dfcx_agent_cicd/cicd_code/UAT/deploy.py new file mode 100644 index 00000000..56fffe96 --- /dev/null +++ b/examples/dfcx_agent_cicd/cicd_code/UAT/deploy.py @@ -0,0 +1,71 @@ +""" UAT Deployment functions""" + +import sys +import json +import logging + +from shared.deployment import Deployment + + +#from .shared.deployments import Deployment +# logging config +logging.basicConfig( + level=logging.INFO, + format="UAT: %(asctime)s %(levelname)-8s %(message)s", + datefmt="%Y-%m-%d %H:%M:%S", +) + + +def main(data): + """ + Deploys and tests a Dialogflow CX agent in a UAT environment. + + This function performs the following steps: + + 1. Initializes a Deployment object with the provided data. + 2. Imports the agent to the specified UAT webhook environment. + 3. Validates test cases. + 4. Collects flow IDs. + 5. Deletes versions based on count. + 6. Cuts a new version. + 7. Deploys the new version. + 8. Updates the datastore with UAT information. + + Args: + data: A dictionary containing configuration data, including the 'uat_webhook_env' key. 
+ """ + + dep=Deployment(data) + # call the steps sequentially + dep.import_agent(webhookenv=data["uat_webhook_env"]) + dep.test_case_validation() + dep.collect_flow_id() + dep.version_count_delete() + dep.version_cut() + dep.deploy_versions() + dep.datastore_update("uat") + + + +if __name__=="__main__": + # read env variables + with open("config.json" , encoding='utf-8') as config_file: + config = json.load(config_file) + SHA_ID=sys.argv[1] + obj=f"UAT/{config['agent_name']}/{SHA_ID}" + sha_gs_loc=( + f"gs://{config['bucket']}/UAT/{config['agent_name']}/{SHA_ID}" + ) + logging.info("Agent location: %s" ,sha_gs_loc) + #adding additional variables to dict + config["sha_agent_gcs_location"]=sha_gs_loc + config["target_project_id"] = config["uat_project"] + config["target_environment_name"]=config["uat_env_deploy"] + with open("agent_artifacts/metadata.json" , encoding='utf-8') as metadata_file: + metadata = json.load(metadata_file) + + config["source_flow_names"]=metadata["source_flow_names"] + config["updated_commit_message"]=metadata["updated_commit_message"] + + # To execute steps in order + main(config) diff --git a/examples/dfcx_agent_cicd/cicd_code/UAT/gcssha.sh b/examples/dfcx_agent_cicd/cicd_code/UAT/gcssha.sh new file mode 100644 index 00000000..88ee0712 --- /dev/null +++ b/examples/dfcx_agent_cicd/cicd_code/UAT/gcssha.sh @@ -0,0 +1,21 @@ + +# Set your GCS bucket name and destination directory +apt-get update && apt-get install -y jq +export GCS_BUCKET=$(jq -r .bucket config.json) +export agent_name=$(jq -r .agent_name config.json) +export DESTINATION_DIR="UAT/${agent_name}/" +echo $DESTINATION_DIR +# Create a local directory +mkdir -p $1 + +# Copy your two files to the local directory +cp agent_artifacts/$agent_name $1 +cp agent_artifacts/metadata.json $1 + +# Upload the local directory to GCS +gsutil -m cp -r $1 "gs://$GCS_BUCKET/$DESTINATION_DIR" + +# Clean up the local directory if needed +rm -r $1 + +echo "Files copied and uploaded to GCS." \ No newline at end of file diff --git a/examples/dfcx_agent_cicd/cicd_code/UAT/requirements.txt b/examples/dfcx_agent_cicd/cicd_code/UAT/requirements.txt new file mode 100644 index 00000000..62207d27 --- /dev/null +++ b/examples/dfcx_agent_cicd/cicd_code/UAT/requirements.txt @@ -0,0 +1,3 @@ +dfcx-scrapi +google-cloud-storage +pandas \ No newline at end of file diff --git a/examples/dfcx_agent_cicd/cicd_code/UAT/trigger.sh b/examples/dfcx_agent_cicd/cicd_code/UAT/trigger.sh new file mode 100644 index 00000000..bf83d555 --- /dev/null +++ b/examples/dfcx_agent_cicd/cicd_code/UAT/trigger.sh @@ -0,0 +1,22 @@ +echo $1 +apt-get update && apt-get install -y jq +export devops_project_id=$(jq -r .devops_project config.json) +export prod_project_id=$(jq -r .prod_project config.json) + +#Use below command to trigger the build if manual invokation is used. 
Since there is no secret, there are no extra charges.

+export build_info=$(gcloud builds triggers run prodbuild --project=$devops_project_id --substitutions=_COMMIT_SHA=$2 --region=$1 --format=json)
+echo "devops prod trigger done"


+# Get the ID of the prod build started by the trigger above
+export prod_build_id=$(echo "$build_info" | jq -r '.metadata.build.id')
+echo "build id returned is"
+echo $prod_build_id


+# Trigger the build in the prod project that is used for approval
+gcloud builds triggers run prodapprovebuild --project=$devops_project_id --substitutions=_APP_BUILD_ID=$prod_build_id --region=$1

+echo "prod project approve build triggered"
\ No newline at end of file
diff --git a/examples/dfcx_agent_cicd/cicd_code/__init__.py b/examples/dfcx_agent_cicd/cicd_code/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/examples/dfcx_agent_cicd/cicd_code/agent_artifacts/.gitkeep b/examples/dfcx_agent_cicd/cicd_code/agent_artifacts/.gitkeep
new file mode 100644
index 00000000..e69de29b
diff --git a/examples/dfcx_agent_cicd/cicd_code/approveprod/cloudbuild_appr.yaml b/examples/dfcx_agent_cicd/cicd_code/approveprod/cloudbuild_appr.yaml
new file mode 100644
index 00000000..b9bf63dc
--- /dev/null
+++ b/examples/dfcx_agent_cicd/cicd_code/approveprod/cloudbuild_appr.yaml
@@ -0,0 +1,36 @@
+steps:
+  - name: gcr.io/google.com/cloudsdktool/cloud-sdk
+    args:
+      - '-c'
+      - |
+        apt-get update && apt-get install -y jq
+        echo $BUILD_ID
+
+        export devopsprojecthere=$(jq -r .devops_project config.json)
+        export build_info=$(gcloud builds describe $BUILD_ID --region=us-central1 --format=json)
+        export approverhere=$(echo "$build_info" | jq -r '.approval.result.approverAccount')
+        export commenthere=$(echo "$build_info" | jq -r '.approval.result.comment')
+        export tokenhere=$(gcloud auth print-access-token)
+
+        echo $approverhere
+        echo $tokenhere
+
+        chmod 777 approveprod/trigger.sh
+
+        sed -i "s/tokenhere/$tokenhere/g" approveprod/trigger.sh
+        sed -i "s/approverhere/$approverhere/g" approveprod/trigger.sh
+        sed -i "s/devopsprojecthere/$devopsprojecthere/g" approveprod/trigger.sh
+        sed -i "s/commenthere/$commenthere/g" approveprod/trigger.sh
+        sed -i "s/appbuildhere/$_APP_BUILD_ID/g" approveprod/trigger.sh
+        cat approveprod/trigger.sh
+        approveprod/trigger.sh
+        echo $?
+        echo "prod build approved from code"
+
+    id: triggerexportbuild
+    entrypoint: /bin/bash
+options:
+  logging: CLOUD_LOGGING_ONLY
+  dynamicSubstitutions: true
diff --git a/examples/dfcx_agent_cicd/cicd_code/approveprod/trigger.sh b/examples/dfcx_agent_cicd/cicd_code/approveprod/trigger.sh
new file mode 100644
index 00000000..1b35ea46
--- /dev/null
+++ b/examples/dfcx_agent_cicd/cicd_code/approveprod/trigger.sh
@@ -0,0 +1,6 @@
+curl --request POST \
+  'https://cloudbuild.googleapis.com/v1/projects/devopsprojecthere/locations/us-central1/builds/appbuildhere:approve?access_token=tokenhere' \
+  --header 'Accept: application/json'\
+  --header 'Content-Type:application/json' --data \
+  '{"approvalResult":{"decision":"APPROVED","comment":"commenthere","approverAccount":"approverhere"}}' \
+  --compressed
\ No newline at end of file
diff --git a/examples/dfcx_agent_cicd/cicd_code/config.json b/examples/dfcx_agent_cicd/cicd_code/config.json
new file mode 100644
index 00000000..70b2a550
--- /dev/null
+++ b/examples/dfcx_agent_cicd/cicd_code/config.json
@@ -0,0 +1,16 @@
+{
+    "agent_name" : "carrental",
+    "dev_env_pull" : "ready to deploy",
+    "uat_env_deploy" : "ready to test",
+    "prod_env_deploy" : "deployed",
+    "devprodsyncenv" : "deployed",
+    "bucket": "dfcx_agent_cicd_export",
+    "dev_project": "yourprojectid",
+    "uat_project" : "yourprojectid",
+    "prod_project": "yourprojectid",
+    "devops_project": "yourprojectid",
+    "uat_webhook_env": "uat",
+    "prod_webhook_env": "prod",
+    "uat_engine_id" : "",
+    "prod_engine_id" : ""
+}
\ No newline at end of file
diff --git a/examples/dfcx_agent_cicd/cicd_code/export/__init__.py b/examples/dfcx_agent_cicd/cicd_code/export/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/examples/dfcx_agent_cicd/cicd_code/export/cloudbuild_export.yaml b/examples/dfcx_agent_cicd/cicd_code/export/cloudbuild_export.yaml
new file mode 100644
index 00000000..90356678
--- /dev/null
+++ b/examples/dfcx_agent_cicd/cicd_code/export/cloudbuild_export.yaml
@@ -0,0 +1,147 @@
+
+availableSecrets:
+  secretManager:
+    - versionName: # your version name
+      env: 'SSH_KEY'
+
+steps:
+# Access the id_github file from Secret Manager, and set up SSH
+  - id: mountsshkey
+    name: 'gcr.io/cloud-builders/git'
+    #dir: 'set your path till the readme doc in the git'
+    secretEnv: ['SSH_KEY']
+    entrypoint: /bin/bash
+    args:
+      - -c
+      - |
+        echo "$$SSH_KEY" >> /root/.ssh/id_rsa
+        chmod 400 /root/.ssh/id_rsa
+        cp known_hosts.github /root/.ssh/known_hosts
+    volumes:
+      - name: 'ssh'
+        path: /root/.ssh
+
+  # Clone the repository
+  - id: clonerepo
+    name: 'gcr.io/cloud-builders/git'
+    #dir: 'set your path till the readme doc in the git'
+    args:
+      - clone
+      - --recurse-submodules
+      - git@github.com:$REPO_FULL_NAME
+    volumes:
+      - name: 'ssh'
+        path: /root/.ssh
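+  # Guard against concurrent pipeline runs: more than one WORKING build of
+  # this trigger, or a prodbuild still PENDING approval, could overwrite the
+  # single exports/dev/<agent> object in GCS, so the next step aborts early.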
+  - id: limitbuildcheck
+    name: gcr.io/google.com/cloudsdktool/cloud-sdk
+    entrypoint: /bin/bash
+    args:
+      - -c
+      - |
+        export parallelbuild=$(gcloud builds list --region=$LOCATION --filter="substitutions.TRIGGER_NAME=$TRIGGER_NAME AND status=WORKING" --format="value(status)" | wc -l)
+        export approvebuild=$(gcloud builds list --region=$LOCATION --format="value(status)" --filter="substitutions.TRIGGER_NAME='prodbuild' AND status='PENDING'" | wc -l)
+        if [ $parallelbuild -gt 1 ]
+        then
+          echo "parallel build running. This may corrupt the exported files in GCS location"
+          exit 1
+        else
+          echo "Proceeding. No other parallel export build"
+        fi
+        if [ $approvebuild -gt 0 ]
+        then
+          echo "some other build is waiting for approval"
+          exit 1
+        else
+          echo "Proceeding. No builds waiting for approval"
+        fi
+
+  - id: fetchuser
+    #dir: 'set your path till the readme doc in the git'
+    name: gcr.io/google.com/cloudsdktool/cloud-sdk
+    entrypoint: /bin/bash
+    args:
+      - '-c'
+      - |
+        echo $BUILD_ID
+        export buildhere=$BUILD_ID
+        export trigid=$(gcloud builds describe $BUILD_ID --region=$LOCATION --format="value(buildTriggerId)")
+        sed -i "s/triggerhere/$trigid/g" export/trigger.sh
+        chmod 777 export/trigger.sh
+        export w1=$(export/trigger.sh)
+        export w2=$(echo $w1 | cut -d " " -f2)
+        export runnerid=$w2
+        export runnername=$(echo $runnerid | cut -d '@' -f 1)
+        echo $buildhere
+        echo $runnerid
+        echo $runnername
+        pwd
+        ls
+        echo $runnername > ./runnername.txt
+        echo $runnerid > ./runnerid.txt
+        echo "path of runner id"
+        pwd
+
+
+  - id: Exportgcs
+    #dir: 'set your path till the readme doc in the git'
+    name: 'python:3.10'
+    entrypoint: /bin/bash
+    args:
+      - -c
+      - |
+        ls
+        pwd
+        pip3 install -r export/requirements.txt
+        export runnerid=$(cat runnerid.txt)
+        echo "runner id is "
+        echo $runnerid
+        python3 -m export.export "${_USERCOMMITMESSAGE}" $runnerid
+
+  - id: downloadartifacts
+    #dir: 'set your path till the readme doc in the git'
+    name: gcr.io/google.com/cloudsdktool/cloud-sdk
+    entrypoint: /bin/bash
+    args:
+      - -c
+      - |
+        apt-get update && apt-get install -y jq
+        export agent_name=$(jq -r .agent_name config.json)
+        export bucket_name=$(jq -r .bucket config.json)
+        echo $agent_name
+        echo $bucket_name
+        mkdir agenttemp
+        gsutil cp "gs://$bucket_name/exports/dev/$agent_name" agenttemp/$agent_name
+        gsutil cp "gs://$bucket_name/exports/dev/${agent_name}_metadata.json" agenttemp/metadata.json
+
+  - id: csrcheckin
+    #dir: 'set your path till the readme doc in the git'
+    name: gcr.io/google.com/cloudsdktool/cloud-sdk
+    entrypoint: /bin/bash
+    args:
+      - -c
+      - |
+        export runnerid=$(cat runnerid.txt)
+        export runnername=$(cat runnername.txt)
+
+        export agent_artifacts_path=$(dirname $(dirname $TRIGGER_BUILD_CONFIG_PATH))
+        chmod 777 export/repopush.sh
+        export/repopush.sh $REPO_NAME $agent_artifacts_path
+        cd $REPO_NAME/$agent_artifacts_path
+        ls
+        cd agent_artifacts
+        ls
+        git config --global user.name $runnername
+        git config --global user.email $runnerid
+        git add .
+        git diff --name-only
+        git commit --allow-empty -m "committed by $runnerid with message ${_USERCOMMITMESSAGE}"
+
+        git push -u origin main
+    volumes:
+      - name: 'ssh'
+        path: /root/.ssh
+
+options:
+  logging: CLOUD_LOGGING_ONLY
+  dynamicSubstitutions: true
diff --git a/examples/dfcx_agent_cicd/cicd_code/export/cloudbuild_export_csr.yaml b/examples/dfcx_agent_cicd/cicd_code/export/cloudbuild_export_csr.yaml
new file mode 100644
index 00000000..8aa0973d
--- /dev/null
+++ b/examples/dfcx_agent_cicd/cicd_code/export/cloudbuild_export_csr.yaml
@@ -0,0 +1,92 @@
+steps:
+
+  - id: limitbuildcheck
+    name: gcr.io/google.com/cloudsdktool/cloud-sdk
+    entrypoint: /bin/bash
+    args:
+      - -c
+      - |
+        export parallelbuild=$(gcloud builds list --region=us-central1 --filter="substitutions.TRIGGER_NAME=$TRIGGER_NAME AND status=WORKING" --format="value(status)" | wc -l)
+        export approvebuild=$(gcloud builds list --region=us-central1 --format="value(status)" --filter="substitutions.TRIGGER_NAME='prodbuild' AND status='PENDING'" | wc -l)
+        if [ $parallelbuild -gt 1 ]
+        then
+          echo "parallel build running.
This may corrupt the exported files in GCS location" + exit 1 + else + echo "Proceeding. No other parallel export build" + fi + if [ $approvebuild -gt 0 ] + then + echo "some other build waiting for approval" + exit 1 + else + echo "Proceeding. No builds waiting for approval" + fi + + - id: fetchuser + name: gcr.io/google.com/cloudsdktool/cloud-sdk + #dir: your/path/here till the readme dir + entrypoint: /bin/bash + args: + - '-c' + - | + echo $BUILD_ID + export buildhere=$BUILD_ID + export trigid=$(gcloud builds describe $BUILD_ID --region=us-central1 --format="value(buildTriggerId)") + sed -i "s/triggerhere/$trigid/g" export/trigger.sh + chmod 777 export/trigger.sh + export w1=$(export/trigger.sh) + export w2=$(echo $w1 | cut -d " " -f2) + export runnerid=$w2 + export runnername=$(echo $runnerid | cut -d '@' -f 1) + echo $buildhere + echo $runnerid + echo $runnername + pwd + ls + echo $runnername > ./runnername.txt + echo $runnerid > ./runnerid.txt + + + - id: Exportgcs + #dir: your/path/here till the readme dir + name: 'python:3.10' + entrypoint: /bin/bash + args: + - -c + - | + pip3 install -r export/requirements.txt + export runnerid=$(cat runnerid.txt) + python3 -m export.export ${_USERCOMMITMESSAGE} $runnerid + + - id: downloadartifacts + #dir: your/path/here till the readme dir + name: gcr.io/google.com/cloudsdktool/cloud-sdk + entrypoint: /bin/bash + args: + - -c + - | + apt-get update && apt-get install -y jq + export agent_name=$(jq -r .agent_name config.json) + export bucket_name=$(jq -r .bucket config.json) + echo $agent_name + echo $bucket_name + mkdir agenttemp + gsutil cp "gs://$bucket_name/exports/dev/$agent_name" agenttemp/$agent_name + gsutil cp "gs://$bucket_name/exports/dev/${agent_name}_metadata.json" agenttemp/metadata.json + + - id: repocheckin + #dir: your/path/here till the readme dir + name: gcr.io/google.com/cloudsdktool/cloud-sdk + entrypoint: /bin/bash + args: + - -c + - | + export runnerid=$(cat runnerid.txt) + export runnername=$(cat runnername.txt) + chmod 777 export/repopush_csr.sh + export/repopush_csr.sh ${_USERCOMMITMESSAGE} $runnername $runnerid + +options: + logging: CLOUD_LOGGING_ONLY + dynamicSubstitutions: true \ No newline at end of file diff --git a/examples/dfcx_agent_cicd/cicd_code/export/export.py b/examples/dfcx_agent_cicd/cicd_code/export/export.py new file mode 100644 index 00000000..fd2504d9 --- /dev/null +++ b/examples/dfcx_agent_cicd/cicd_code/export/export.py @@ -0,0 +1,131 @@ +""" export functions""" + +import json +import sys +import logging + +from dfcx_scrapi.core.agents import Agents + +from .flow_impacted import Impacted +from google.cloud import storage + +# logging config +logging.basicConfig( + level=logging.INFO, + format="dev: %(asctime)s %(levelname)-8s %(message)s", + datefmt="%Y-%m-%d %H:%M:%S", +) + +def agent_to_gcs( + agent_name, + project_id, + environment_name, + gsloc): + """Exports a Dialogflow CX agent to Google Cloud Storage (GCS). + + This function exports a specified Dialogflow CX agent and its environment + to a designated location in Google Cloud Storage. + + Args: + agent_name: The display name of the agent to export. + project_id: The ID of the Google Cloud project where the agent resides. + environment_name: The display name of the environment to export. + gsloc: The GCS bucket URI where the agent will be exported. 
+ + Returns: + None + """ + agents=Agents() + + agent_details=agents.get_agent_by_display_name( + display_name=agent_name, + project_id=project_id + ) + + agent_id=agent_details.name + agent_gcs_location=f"{gsloc}/{agent_name}" + + #export will replace exisitng agent in bucket + agents.export_agent(agent_id=agent_id, + gcs_bucket_uri=agent_gcs_location, + environment_display_name=environment_name) + logging.info("Agent export from dev done") + +def meta_to_gcs( + config_data, + flow_names, + commit_message, + gsloc, + agent_name, + gcs_bucket, + version_ids + ): + """Exports metadata to a JSON file in Google Cloud Storage (GCS). + + This function takes configuration data, flow names, a commit message, + GCS location information, agent name, and version IDs, and creates a JSON + file containing this metadata in the specified GCS bucket. + + Args: + config_data: A dictionary containing configuration data. + flow_names: A list of flow names. + commit_message: The commit message to include in the metadata. + gsloc: The full GCS URI where the metadata file will be stored. + agent_name: The name of the agent. + gcs_bucket: The name of the GCS bucket. + version_ids: A list of version IDs. + + Returns: + None + """ + + config_data["source_flow_names"]=flow_names + config_data["impacted_version_ids"]= version_ids + config_data["updated_commit_message"]=commit_message + gcslist=gsloc.split("/") + obj="/".join(gcslist[3:]) + + bucket_obj = storage.Client().get_bucket(gcs_bucket) + + blob = bucket_obj.blob(f"{obj}/{agent_name}_metadata.json") + blob.upload_from_string(data=json.dumps(config_data), + content_type='application/json') + + +if __name__=='__main__': + # read env variables + with open("config.json", encoding='utf-8') as config_file: + config = json.load(config_file) + + source_project_id=config["dev_project"] + source_agent_name=config["agent_name"] + source_environment_name=config["dev_env_pull"] + bucket=config["bucket"] + user_commit_message=sys.argv[1] + userid=sys.argv[2] + #updated_commit_message=f"{user_commit_message} by {userid} for {source_agent_name}" + updated_commit_message = ( + f"{user_commit_message} by {userid} " + f"for {source_agent_name}" + ) + impflows=Impacted(source_project_id=source_project_id, + source_agent_name=source_agent_name, + environment_name=source_environment_name) + imp_flow_map,impacted_version_ids=impflows.check_flow() + source_flow_names=list(imp_flow_map.values()) + source_flow_ids=list(imp_flow_map.keys()) + gs_loc=f"gs://{bucket}/exports/dev" + + logging.info("impacted flow is %(imp_flow_map)s" + , {'imp_flow_map': imp_flow_map}) + + + #Execute in steps + agent_to_gcs(source_agent_name, + source_project_id, + source_environment_name, + gs_loc) + meta_to_gcs(config,source_flow_names, + updated_commit_message,gs_loc, + source_agent_name,bucket,impacted_version_ids) + \ No newline at end of file diff --git a/examples/dfcx_agent_cicd/cicd_code/export/flow_impacted.py b/examples/dfcx_agent_cicd/cicd_code/export/flow_impacted.py new file mode 100644 index 00000000..150120f5 --- /dev/null +++ b/examples/dfcx_agent_cicd/cicd_code/export/flow_impacted.py @@ -0,0 +1,120 @@ +""" Getting impacted flow functions""" + +from dfcx_scrapi.core.agents import Agents +from dfcx_scrapi.core.environments import Environments +from dfcx_scrapi.core.flows import Flows + +from typing import Dict + + +class Impacted: + """ + Analyzes and identifies changes in Dialogflow CX agent flows across environment versions. 
+ + This class retrieves information about a specified Dialogflow CX agent and its environment, + including version history and flow details. It then compares the latest two versions to + identify any changes in the flows, providing a mapping of impacted flow IDs and names. + + Attributes: + source_project_id: The ID of the Google Cloud project where the agent resides. + source_agent_name: The display name of the agent. + environment_name: The display name of the agent's environment (default: "ready to deploy"). + + Methods: + filter_flows: (Static method) Filters a flow map based on + differences between two environments. + check_flow: Identifies and returns a dictionary of + changed flows between the latest two versions. + """ + + #Get agent id + + def __init__( + self,source_project_id, + source_agent_name, + environment_name="ready to deploy" + ): + self.env=Environments() + self.flows=Flows() + + self.source_project_id=source_project_id + self.source_agent_name=source_agent_name + self.environment_name=environment_name + self.filtered_dict={} + + agents=Agents() + agent_details=agents.get_agent_by_display_name( + display_name=self.source_agent_name, + project_id=self.source_project_id + ) + + self.agent_id=agent_details.name + + #get environment id + env_details=self.env.get_environment_by_display_name( + display_name=self.environment_name + ,agent_id=self.agent_id + ) + self.env_id=env_details.name + + #get history + self.hist=self.env.lookup_environment_history( + environment_id=self.env_id + ) + + @staticmethod + def filter_flows(env1,env2,flowmap,versions): + """ Returns filtered dict and impacted version ids""" + impacted_flows=[] + for k,v in env1.items(): + if v!=env2.get(k,0): + impacted_flows.append(k) + + filtered_dict = { + k: v for k, v in flowmap.items() + if k.split("/")[-1] in impacted_flows + } + #getting version ids + impacted_version_ids=[] + for ver in versions: + ver=ver.version + flow=ver.split("/")[-3] + if flow in impacted_flows: + impacted_version_ids.append(ver) + + + return filtered_dict,impacted_version_ids + + + + def check_flow( + self + ) -> Dict[str, str]: + #compare latest 2 history + """ + returns map of flow id:flow name which was found to be changed + """ + env1={} + for i in self.hist[0].version_configs: + flow=i.version.split("/")[-3] + version=i.version.split("/")[-1] + env1[flow]=version + + env2={} + if len(self.hist)>1: + for i in self.hist[1].version_configs: + flow=i.version.split("/")[-3] + version=i.version.split("/")[-1] + env2[flow]=version + + #get flow map for id name comparision + flowmap=self.flows.get_flows_map(agent_id=self.agent_id) + + self.filtered_dict,self.impacted_version_ids = Impacted.filter_flows( + env1, + env2, + flowmap, + self.hist[0].version_configs + ) + + return self.filtered_dict,self.impacted_version_ids diff --git a/examples/dfcx_agent_cicd/cicd_code/export/repopush.sh b/examples/dfcx_agent_cicd/cicd_code/export/repopush.sh new file mode 100644 index 00000000..e4d08509 --- /dev/null +++ b/examples/dfcx_agent_cicd/cicd_code/export/repopush.sh @@ -0,0 +1,14 @@ +apt-get update && apt-get install -y jq +export project_id=$(jq -r .devops_project config.json) +export agent_name=$(jq -r .agent_name config.json) +echo $agent_name + +cd $1 +git checkout main +echo "pwd" +pwd +date > agent_artifacts/timestamp.txt +rm agent_artifacts/* +cp /workspace/$2/agenttemp/$agent_name agent_artifacts/ +cp /workspace/$2/agenttemp/metadata.json agent_artifacts/ +date > agent_artifacts/timestamp.txt \ No newline at end of file diff --git 
a/examples/dfcx_agent_cicd/cicd_code/export/repopush_csr.sh b/examples/dfcx_agent_cicd/cicd_code/export/repopush_csr.sh new file mode 100644 index 00000000..f952cce2 --- /dev/null +++ b/examples/dfcx_agent_cicd/cicd_code/export/repopush_csr.sh @@ -0,0 +1,30 @@ +apt-get update && apt-get install -y jq +export project_id=$(jq -r .devops_project config.json) +export agent_name=$(jq -r .agent_name config.json) +echo $agent_name +cd agenttemp +ls -all +gcloud source repos clone agentcicd --project=$project_id +#git remote add google 'https://source.developers.google.com/p/xxx/r/agentTest' + +cd agentcicd +git checkout main +ls -all + +rm agent_artifacts/* +cp ../$agent_name agent_artifacts/ +cp ../metadata.json agent_artifacts/ +date > agent_artifacts/timestamp.txt +cd agent_artifacts +ls +cd .. +echo $3 +git config --global user.name $2 +git config --global user.email $3 + +git add . +echo "$1" +git diff --name-only +git commit --allow-empty -m "$1" + +git push -u origin main \ No newline at end of file diff --git a/examples/dfcx_agent_cicd/cicd_code/export/requirements.txt b/examples/dfcx_agent_cicd/cicd_code/export/requirements.txt new file mode 100644 index 00000000..62207d27 --- /dev/null +++ b/examples/dfcx_agent_cicd/cicd_code/export/requirements.txt @@ -0,0 +1,3 @@ +dfcx-scrapi +google-cloud-storage +pandas \ No newline at end of file diff --git a/examples/dfcx_agent_cicd/cicd_code/export/trigger.sh b/examples/dfcx_agent_cicd/cicd_code/export/trigger.sh new file mode 100644 index 00000000..46dec1ad --- /dev/null +++ b/examples/dfcx_agent_cicd/cicd_code/export/trigger.sh @@ -0,0 +1 @@ +gcloud logging read 'resource.labels.build_trigger_id="triggerhere" AND protoPayload.methodName="google.devtools.cloudbuild.v1.CloudBuild.RunBuildTrigger"' --limit 20 | grep principalEmail | head -n 1 \ No newline at end of file diff --git a/examples/dfcx_agent_cicd/cicd_code/media/image1.png b/examples/dfcx_agent_cicd/cicd_code/media/image1.png new file mode 100644 index 00000000..0eb79237 Binary files /dev/null and b/examples/dfcx_agent_cicd/cicd_code/media/image1.png differ diff --git a/examples/dfcx_agent_cicd/cicd_code/media/image2.png b/examples/dfcx_agent_cicd/cicd_code/media/image2.png new file mode 100644 index 00000000..f977a083 Binary files /dev/null and b/examples/dfcx_agent_cicd/cicd_code/media/image2.png differ diff --git a/examples/dfcx_agent_cicd/cicd_code/media/image3.png b/examples/dfcx_agent_cicd/cicd_code/media/image3.png new file mode 100644 index 00000000..87d20e63 Binary files /dev/null and b/examples/dfcx_agent_cicd/cicd_code/media/image3.png differ diff --git a/examples/dfcx_agent_cicd/cicd_code/media/image4.png b/examples/dfcx_agent_cicd/cicd_code/media/image4.png new file mode 100644 index 00000000..8131da4b Binary files /dev/null and b/examples/dfcx_agent_cicd/cicd_code/media/image4.png differ diff --git a/examples/dfcx_agent_cicd/cicd_code/media/image5.png b/examples/dfcx_agent_cicd/cicd_code/media/image5.png new file mode 100644 index 00000000..e48c83bd Binary files /dev/null and b/examples/dfcx_agent_cicd/cicd_code/media/image5.png differ diff --git a/examples/dfcx_agent_cicd/cicd_code/media/image6.png b/examples/dfcx_agent_cicd/cicd_code/media/image6.png new file mode 100644 index 00000000..1d162363 Binary files /dev/null and b/examples/dfcx_agent_cicd/cicd_code/media/image6.png differ diff --git a/examples/dfcx_agent_cicd/cicd_code/prod/__init__.py b/examples/dfcx_agent_cicd/cicd_code/prod/__init__.py new file mode 100644 index 00000000..e69de29b diff --git 
a/examples/dfcx_agent_cicd/cicd_code/prod/cloudbuild_deploy.yaml b/examples/dfcx_agent_cicd/cicd_code/prod/cloudbuild_deploy.yaml
new file mode 100644
index 00000000..d6925eca
--- /dev/null
+++ b/examples/dfcx_agent_cicd/cicd_code/prod/cloudbuild_deploy.yaml
@@ -0,0 +1,15 @@
+steps:
+  - id: deployagent
+    name: 'python:3.10'
+    #dir: 'set your path till the readme doc in the git'
+    entrypoint: /bin/bash
+    args:
+      - -c
+      - |
+        echo "printing received variables now"
+        echo ${_COMMIT_SHA}
+        pip3 install -r prod/requirements.txt
+        python3 -m prod.deploy ${_COMMIT_SHA}
+
+options:
+  logging: CLOUD_LOGGING_ONLY
\ No newline at end of file
diff --git a/examples/dfcx_agent_cicd/cicd_code/prod/deploy.py b/examples/dfcx_agent_cicd/cicd_code/prod/deploy.py
new file mode 100644
index 00000000..20aca8af
--- /dev/null
+++ b/examples/dfcx_agent_cicd/cicd_code/prod/deploy.py
@@ -0,0 +1,82 @@
+""" Deploy to prod functions """
+import json
+import logging
+import sys
+
+from shared.deployment import Deployment
+
+# logging config
+logging.basicConfig(
+    level=logging.INFO,
+    format="PROD: %(asctime)s %(levelname)-8s %(message)s",
+    datefmt="%Y-%m-%d %H:%M:%S",
+)
+
+def main(data):
+    """
+    Deploys and validates a Dialogflow CX agent in a production environment.
+    This function orchestrates the deployment and validation of a Dialogflow CX agent
+    in a production environment. It performs the following key steps:
+
+    1. Imports the agent to the specified production webhook environment.
+    2. Performs a language check on fulfillment entries, parameters, and routes,
+       specifically for French Canadian ('fr-ca').
+    3. Collects flow IDs.
+    4. Manages version count and deletion.
+    5. Cuts a new version of the agent.
+    6. Deploys the new version to production.
+    7. Synchronizes the agent between development and production environments.
+    8. Updates the datastore with production information.
+
+    Args:
+        data: A dictionary containing configuration data, including the 'prod_webhook_env' key.
+
+    Raises:
+        SystemExit: If the language check fails, indicating missing agent responses.
+    """
+    dep=Deployment(data)
+    # call the steps sequentially
+    dep.import_agent(webhookenv=data["prod_webhook_env"])
+
+    entry,param,route,result=dep.fullfillment_lang_check(lang='fr-ca')
+
+    logging.info("Entry fulfilment is %s",entry)
+    logging.info("Param fulfilment is %s",param)
+    logging.info("Route fulfilment is %s",route)
+    if not result:
+        print("some pages, parameters, or routes are missing agent responses")
+        sys.exit(2)
+
+    dep.collect_flow_id()
+    dep.version_count_delete()
+    dep.version_cut()
+    dep.deploy_versions()
+    dep.dev_prod_sync()
+    dep.datastore_update("prod")
+
+
+if __name__=='__main__':
+    # read env variables
+    with open("config.json" , encoding='utf-8') as config_file:
+        config = json.load(config_file)
+
+    SHA_ID=sys.argv[1]
+    obj=f"UAT/{config['agent_name']}/{SHA_ID}"
+    sha_agent_gcs_location=(
+        f"gs://{config['bucket']}/UAT/{config['agent_name']}/{SHA_ID}"
+    )
+    logging.info("agent location %s", sha_agent_gcs_location)
+    # add additional variables to the dict
+    config["sha_agent_gcs_location"]=sha_agent_gcs_location
+    config["target_project_id"] = config["prod_project"]
+    config['target_environment_name']=config["prod_env_deploy"]
+    with open("agent_artifacts/metadata.json" , encoding='utf-8') as metadata_file:
+        metadata = json.load(metadata_file)
+
+    config["source_flow_names"]=metadata["source_flow_names"]
+    config["updated_commit_message"]=metadata["updated_commit_message"]
+    config["impacted_version_ids"]=metadata["impacted_version_ids"]
+
+    # To execute steps in order
+    main(config)
diff --git a/examples/dfcx_agent_cicd/cicd_code/prod/requirements.txt b/examples/dfcx_agent_cicd/cicd_code/prod/requirements.txt
new file mode 100644
index 00000000..62207d27
--- /dev/null
+++ b/examples/dfcx_agent_cicd/cicd_code/prod/requirements.txt
@@ -0,0 +1,3 @@
+dfcx-scrapi
+google-cloud-storage
+pandas
\ No newline at end of file
diff --git a/examples/dfcx_agent_cicd/cicd_code/readme.md b/examples/dfcx_agent_cicd/cicd_code/readme.md
new file mode 100644
index 00000000..0dbab79e
--- /dev/null
+++ b/examples/dfcx_agent_cicd/cicd_code/readme.md
@@ -0,0 +1,366 @@
+This document outlines how to use this sample code and set up the CI/CD
+pipeline in your GCP projects.
+
+# Primer
+
+Using this CI/CD pipeline, you can
+
+- Promote/migrate an agent across 3 GCP projects, i.e. dev -\> uat -\> prod
+
+Below are the steps that happen in each of the 3 projects while promoting
+the agent.
+
+**In Dev project**
+
+- Exports the agent from a designated DFCX environment in dev (coming from
+  the config file)
+
+- Automatically detects which flows are impacted, based on the environment
+  history in the dev project, and saves them in a metadata file (see the
+  sketch after these lists)
+
+- Automatically syncs the flows in the deployed DFCX environment once a
+  flow is deployed in prod
+
+**In UAT project**
+
+- Creates new versions of only the impacted flows in uat and deploys them
+  to the designated DFCX environment (coming from the config file)
+
+- Runs the test cases relevant to the impacted flows and rolls back if
+  they fail in UAT
+
+- Automatically deletes older versions of flows in uat and prod once the
+  version limit is reached
+
+- Updates the apigee environment in the webhook URL to the one
+  corresponding to the UAT project
+
+- If multiple languages are configured in the agent, automatically verifies
+  the pages of the other languages against the English pages to check that
+  agent fulfillments/responses are present in every configured language
+
+- Gives the UAT team a mechanism to approve in the UI once testing is
+  completed, and deploys the agent to prod after that approval
+
+- Automatically syncs the flows in the deployed DFCX environment once a
+  flow is deployed in prod
+
+**In Prod project**
+
+- Once the UAT team approves after UAT testing, creates new versions of
+  only the impacted flows in prod and deploys them to the designated DFCX
+  environment (coming from the config file)
+
+- Updates the apigee environment in the webhook URL to the one
+  corresponding to the prod project
+
+- Automatically deletes older versions of flows in uat and prod once the
+  version limit is reached
+
+- Automatically deploys the impacted flows to the serving DFCX environment
+  in prod
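+The flow-impact detection mentioned above compares the two most recent
+entries of the DFCX environment's version history (see
+export/flow_impacted.py). A minimal sketch of the idea with SCRAPI,
+assuming the agent, project, and environment names from config.json:
+
+```python
+from dfcx_scrapi.core.agents import Agents
+from dfcx_scrapi.core.environments import Environments
+
+agent = Agents().get_agent_by_display_name(
+    display_name="carrental", project_id="yourprojectid"
+)
+env = Environments()
+env_id = env.get_environment_by_display_name(
+    display_name="ready to deploy", agent_id=agent.name
+).name
+
+# Each history entry pins every flow to a version; a flow whose pinned
+# version differs between the two latest entries was changed.
+hist = env.lookup_environment_history(environment_id=env_id)
+latest = {vc.version.split("/")[-3]: vc.version.split("/")[-1]
+          for vc in hist[0].version_configs}
+previous = {vc.version.split("/")[-3]: vc.version.split("/")[-1]
+            for vc in hist[1].version_configs} if len(hist) > 1 else {}
+impacted = [flow for flow, ver in latest.items() if previous.get(flow) != ver]
+print(impacted)  # flow IDs whose pinned version changed
+```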
+# High level Architecture
+
+![](media/image1.png)
+
+# Set up
+
+## Assumptions:
+
+1.  You already have a GCP account
+
+2.  You have created 3 separate projects for dev/uat/prod
+
+3.  You have an agent in dev and have created a dummy/empty agent with the
+    same name in the uat and prod projects
+
+4.  You have git or a similar repo to source the code and to check in the
+    agent artifacts whenever the build is triggered
+
+5.  You are going to use the same repo to store the code artifacts as well
+    as the agent artifacts that get checked in during the build process
+
+6.  You will create all 3 builds in your dev project. If need be, you can
+    have all the builds in a centralized project and use IAM service
+    accounts to let the builds access the agents in the dev/uat/prod
+    projects for migration
+
+7.  You will generate and put the known_hosts.github file in the repo at
+    the same level as this readme file
+
+8.  Set the dir parameter in all the build steps of all 3 yaml files if
+    your repo does not contain this readme file and the other folders such
+    as export, UAT and prod in the repo root. If these core files are
+    nested, set dir to the path where this readme file is present
+
+## IAM Permissions
+
+1.  Create a service account and give it the following IAM permissions:
+
+- **Dialogflow CX API Admin**
+
+- **Dialogflow API Client**
+
+- **Storage Admin and Storage Object User**
+
+- **CSR/Git access**
+
+- **Service Usage Consumer**
+
+- **Dialogflow \> Dialogflow Test Case Admin**
+
+- **Dialogflow \> Dialogflow Environment Editor**
+
+- **Cloud Build Service Account**
+
+- **Logs Viewer**
+
+- **Logs Writer**
+
+- **Cloud Build Viewer**
+
+2.  Give the approver **cloudbuild.builds.approve** access in the dev
+    project
+
+## For the UAT and prod builds to access the UAT and prod projects to deploy the agent (see assumption no. 6)
+
+- Get the service account that is used by the cloud builds in your dev
+  project
+
+- Go to the UAT and prod projects \> IAM \> add principal, enter the
+  service account ID you got from the previous step, and grant it
+  Service Usage Consumer **and** Dialogflow API Admin
+
+- Give the dev build's service account **cloudbuild.builds.get** access
+
+## Code Repository and Branching Strategy
+
+This section describes the approach for setting up the repository and
+branches for the agents.
+
+**Source Repository**
+
+Below are the reasons why we need a repository:
+
+1.  Cloud Builds need some place from which to access the code they build
+
+2.  Some Cloud Builds are triggered automatically when an agent artifact
+    is checked in to the repository
+
+3.  Maintain an audit trail to see who changed the code
+
+4.  Maintain an audit trail to see who checked in agent artifacts to the
+    repo, along with a commit message that explains what changed in the
+    agent/flow
+
+You can use either GCP's private git, i.e. Cloud Source Repositories, or
+other gits such as GitHub.
+
+If you use CSR (deprecated for new users), use the
+export/cloudbuild_export_csr.yaml and repopush_csr.sh files.
+
+If you use GitHub, use the export/cloudbuild_export.yaml and repopush.sh
+files.
+
+## Storage Bucket
+
+Create a GCS bucket that will be used by the pipeline to store agents
+while exporting and restoring. Below is how the bucket structure might
+look.
+
+![](media/image2.png)
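+The pipeline writes to two areas of this bucket: the export build
+overwrites gs://\<bucket\>/exports/dev/\<agent_name\> (plus a metadata
+JSON), and the UAT build copies each run's artifacts to
+gs://\<bucket\>/UAT/\<agent_name\>/\<commit sha\>/ so every run stays
+addressable by its SHA. A quick way to inspect the layout, assuming the
+bucket name from config.json:
+
+```python
+from google.cloud import storage
+
+client = storage.Client()
+# Bucket name assumed from config.json ("bucket").
+for blob in client.list_blobs("dfcx_agent_cicd_export", prefix="exports/dev/"):
+    print(blob.name)  # e.g. exports/dev/carrental, exports/dev/carrental_metadata.json
+for blob in client.list_blobs("dfcx_agent_cicd_export", prefix="UAT/"):
+    print(blob.name)  # e.g. UAT/carrental/<commit sha>/carrental
+```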
+## Cloud Build Configurations
+
+There are certain configurations that the user has to fill in while
+triggering Build1 for agent promotion. The following substitution
+variable is defined for the Cloud Build:
+
+- **\_COMMIT_MESSAGE** - the commit message describing the agent change,
+  supplied by the user when the export build is triggered
+
+### Export Build:
+
+![](media/image3.png)
+
+### UAT deploy build
+
+![](media/image4.png)
+
+### Prod deploy build
+
+![](media/image5.png)
+
+## DFCX APIs
+
+The Python Dialogflow CX Scripting [[API (DFCX
+SCRAPI)]](https://github.com/GoogleCloudPlatform/dfcx-scrapi)
+is a high level API that extends the official Google [[Python Client for
+Dialogflow
+CX]](https://github.com/googleapis/python-dialogflow-cx).
+SCRAPI makes using DFCX easier, more friendly, and more pythonic for bot
+builders, developers, and maintainers. It uses the V3/V3beta1 endpoints
+under the hood. Since it is a more pythonic implementation, developers
+will find SCRAPI easy to put into action.
+
+In our CI/CD pipeline, the following operations are achieved using the
+SCRAPI API:
+
+- Find the agent ID from its name
+
+- Find the flow ID from its name
+
+- [[Export the agent to
+  GCS]](https://github.com/GoogleCloudPlatform/dfcx-scrapi/blob/37cf8cf7b2013a377740f68d8dcb7355632161e0/src/dfcx_scrapi/core/agents.py#L363)
+
+- [[Restore the
+  agent]](https://github.com/GoogleCloudPlatform/dfcx-scrapi/blob/37cf8cf7b2013a377740f68d8dcb7355632161e0/src/dfcx_scrapi/core/agents.py#L438)
+
+- [[Cut a version of a
+  flow]](https://github.com/GoogleCloudPlatform/dfcx-scrapi/blob/37cf8cf7b2013a377740f68d8dcb7355632161e0/src/dfcx_scrapi/core/versions.py#L183)
+
+- [[Deploy it to an
+  environment]](https://github.com/GoogleCloudPlatform/dfcx-scrapi/blob/37cf8cf7b2013a377740f68d8dcb7355632161e0/src/dfcx_scrapi/core/environments.py#L359)
+
+- [[Run test
+  cases]](https://github.com/GoogleCloudPlatform/dfcx-scrapi/blob/37cf8cf7b2013a377740f68d8dcb7355632161e0/src/dfcx_scrapi/core/test_cases.py#L410)
+
+- [[Compare environment
+  history]](https://github.com/GoogleCloudPlatform/dfcx-scrapi/blob/37cf8cf7b2013a377740f68d8dcb7355632161e0/src/dfcx_scrapi/core/environments.py#L392)
+  to find the flows impacted in the current run of the CI/CD pipeline
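+As a concrete illustration of the first few operations (as used in
+export/export.py), a minimal sketch, assuming the agent, project, bucket,
+and environment names from config.json:
+
+```python
+from dfcx_scrapi.core.agents import Agents
+
+agents = Agents()
+# Find the agent ID from its display name.
+agent = agents.get_agent_by_display_name(
+    display_name="carrental", project_id="yourprojectid"
+)
+# Export the agent, as deployed in the given DFCX environment, to GCS;
+# the export replaces any existing file at that URI.
+agents.export_agent(
+    agent_id=agent.name,
+    gcs_bucket_uri="gs://dfcx_agent_cicd_export/exports/dev/carrental",
+    environment_display_name="ready to deploy",
+)
+```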
+## To Set up the pipeline
+
+1.  Set up git or any code repository of your choice to store the code
+    and the agent artifacts
+
+2.  Push the code you see in the SCRAPI repo
+    (examples/dfcx_agent_cicd/cicd_code) into the parent folder along
+    with this documentation.
+
+3.  If you fork the SCRAPI repo and use it for your CI/CD pipeline, then
+    in all yaml files inside the export/UAT/prod folders add dir: the
+    path up to this readme doc (a comment marks the spot in the yaml
+    files)
+
+4.  If you use CSR (decommissioned for new users after 06/2024), use
+    cloudbuild_export_csr.yaml and repopush_csr.sh (i.e. you will use
+    this yaml file as the trigger in the export build). Most likely,
+    though, you will want to use a GitHub-style repo and the default
+    files cloudbuild_export.yaml and repopush.sh. For the latter you
+    don't need to make any configuration changes
+
+5.  As mentioned in the point above, if you use a GitHub-style repo, you
+    need to create an SSH key and store it in Secret Manager so the
+    cloud build can check in. Please follow this
+    [documentation](https://cloud.google.com/build/docs/access-github-from-build)
+    to create the SSH key, put it in GCP secrets, generate
+    known_hosts.github, and push it to your repo at the same level as
+    this readme file.
+
+6.  Use the config file as a one-stop place to initialize the values of
+    the variables used throughout the pipeline. This eases maintenance
+    and lets the pipeline be reused with different values.
+
+```json
+{
+    "agent_name" : "carrental",
+    "dev_env_pull" : "ready to deploy",
+    "uat_env_deploy" : "ready to test",
+    "prod_env_deploy" : "deployed",
+    "devprodsyncenv" : "deployed",
+    "bucket" : "dfcx_agent_cicd_export",
+    "dev_project" : "yourprojectid",
+    "uat_project" : "yourprojectid",
+    "prod_project" : "yourprojectid",
+    "devops_project" : "yourprojectid",
+    "uat_webhook_env" : "uat",
+    "prod_webhook_env" : "prod",
+    "uat_engine_id" : "",
+    "prod_engine_id" : ""
+}
+```
+
+7.  Make sure a GCS bucket is created with the structure described above
+    and that its name is configured in the config file
+
+8.  Create the 3 cloud builds with the configuration and names shown in
+    the screenshots in the previous section, and attach your repo to
+    these builds.
+
+9.  Make sure an agent with the same name is present in UAT and prod (if
+    it is the first time, just create an empty agent in the UAT/prod
+    projects)
+
+10. Make sure the agents in the UAT and prod projects have the
+    environments configured in the config file fields uat_env_deploy and
+    prod_env_deploy
+
+11. Make sure you have also created the environment configured in the
+    config file field devprodsyncenv in the UAT and dev projects, to sync
+    back the flows after they are deployed in prod
+
+## To run the Pipeline
+
+1.  Make some changes to your agent in the dev project, create a version
+    of the flow in DFCX, and deploy the updated flows to the DFCX
+    environment you configured in the config file field dev_env_pull.
+
+2.  Go to the GCP Cloud Build console, click RUN on exportbuild in the
+    triggers section, and input the commit message (a few lines about
+    your change to the agent)
+
+3.  This exports the agent and checks it in to the repo, which triggers
+    the UAT and prod builds and deploys the agent in the UAT project.
+
+4.  You can then come back to the Cloud Build console, open the build
+    history tab, and approve the build that is waiting for your approval;
+    it will deploy the agent in prod after approval
+    ![](media/image6.png)
+
+## Caveat
+
+1.  If datastores are linked to the agent, make sure to create the
+    datastores with the same IDs across all three projects
+
+# Benefits
+
+1.  The entire agent promotion process is automated
+
+2.  The code base is modularized according to best practices
+
+3.  DFCX best practices are configured in the pipeline setup
+4.  The same pipeline can be used concurrently on the same agent by
+    multiple agent developers, each deploying their own flow and
+    approving its deployment individually, since the commit id/SHA ID is
+    the primary identifier for each run of the pipeline
+
+5.  Datastore configurations will not break if the same datastore ID is
+    used in all the projects
diff --git a/examples/dfcx_agent_cicd/cicd_code/shared/__init__.py b/examples/dfcx_agent_cicd/cicd_code/shared/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/examples/dfcx_agent_cicd/cicd_code/shared/deployment.py b/examples/dfcx_agent_cicd/cicd_code/shared/deployment.py
new file mode 100644
index 00000000..e6b11183
--- /dev/null
+++ b/examples/dfcx_agent_cicd/cicd_code/shared/deployment.py
@@ -0,0 +1,278 @@
+""" Shared module that acts as a wrapper for deployment"""
+
+import datetime
+import time
+import logging
+import sys
+import json
+
+from dfcx_scrapi.core.agents import Agents
+from dfcx_scrapi.core.versions import Versions
+from dfcx_scrapi.core.environments import Environments
+from dfcx_scrapi.core.flows import Flows
+
+from google.cloud.dialogflowcx_v3beta1 import types
+
+
+from .test_case_run import RunTestCases
+from .webhook_update import update_webhook
+from .en_vs_other_lang import en_vs_lang
+
+
+# logging config
+logging.basicConfig(
+    level=logging.INFO,
+    format="%(asctime)s %(levelname)-8s %(message)s",
+    datefmt="%Y-%m-%d %H:%M:%S",
+)
+
+class Deployment:
+    """
+    Manages the deployment and lifecycle of Dialogflow CX agents.
+
+    This class provides methods for importing, testing, versioning, and deploying
+    Dialogflow CX agents across different environments. It handles tasks such as:
+
+    - Importing agents from GCS.
+    - Updating webhook configurations.
+    - Running test cases and validating results.
+    - Collecting and managing flow IDs.
+    - Versioning and deploying flows.
+    - Syncing flows between environments (e.g., dev and prod).
+    - Updating datastore settings.
+
+    Attributes:
+        (Initialized from an input dictionary)
+
+    Methods:
+        import_agent: Imports an agent from GCS to a target project.
+        test_case_validation: Runs test cases and validates the results.
+        collect_flow_id: Collects the IDs of flows to be deployed.
+        version_count_delete: Manages version count and deletes old versions if necessary.
+        version_cut: Creates new versions of the specified flows.
+        deploy_versions: Deploys the new versions to the target environment.
+        dev_prod_sync: Synchronizes flows between development and production environments.
+        datastore_update: Updates datastore settings for the agent.
+    """
+    def __init__(self,input_dict):
+        for key, value in input_dict.items():
+            setattr(self, key, value)
+
+    def import_agent(self,webhookenv):
+        """Imports a Dialogflow CX agent to the target project.
+
+        This method restores a Dialogflow CX agent from a GCS bucket to the
+        specified target project and updates the webhook URI for the agent.
+
+        Args:
+            webhookenv: The webhook environment to use for the imported agent.
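+
+        Example (hypothetical, mirroring UAT/deploy.py):
+            dep = Deployment(config)
+            dep.import_agent(webhookenv=config["uat_webhook_env"])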
+ """ + + agent=Agents() + target_agent_details=agent.get_agent_by_display_name( + display_name=self.agent_name, + project_id=self.target_project_id + ) + + self.target_agent_id=target_agent_details.name + + + #restoring the agent from the SHA ID folder + agent.restore_agent( + agent_id=self.target_agent_id, + gcs_bucket_uri=f"{self.sha_agent_gcs_location}/{self.agent_name}", + restore_option=2 + ) + + logging.info("import to destination project done") + + #[1.1] update webhooks uri + update_webhook(self.target_agent_id,webhookenv) + + + def test_case_validation(self): + """Runs test cases and validates the results. + + This method executes test cases for the specified agent and environment, + using tags to filter the test cases to run. If any test case fails, + the script exits with an error code. + + Raises: + SystemExit: If any test case fails. + """ + + tags=["#"+f for f in self.source_flow_names] + obj=RunTestCases( + project_id=self.target_project_id, + agent_name=self.agent_name, + environment_name=None) + stats,result=obj.trigger_test_case(tags=tags) + logging.info("test case result: %s", json.dumps(stats, indent=2)) + if not result: + sys.exit(2) + + + def collect_flow_id(self): + """Collects the IDs of flows to be deployed. + + This method retrieves the IDs of the flows specified in `self.source_flow_names` + from the target Dialogflow CX agent. It introduces a 50-second delay to allow + for agent stabilization before fetching the flow IDs. + """ + time.sleep(50) + flow=Flows() + logging.info( + "flows to deployed in %s project: %s", + self.target_project_id, + self.source_flow_names + ) + flow_ids=[] + for flow_name in self.source_flow_names: + flow_details=flow.get_flow_by_display_name( + display_name=flow_name, + agent_id=self.target_agent_id) + flow_ids.append(flow_details.name + ) + self.flow_ids=flow_ids + + def version_count_delete(self): + """ + 1. Check if the count of versions of a flow is not exceeding 20(limit) + else delete the older version + 2. and make room for new version cut + """ + versions=Versions() + for flow_id in self.flow_ids: + flowver=versions.list_versions(flow_id=flow_id) + if len(flowver)==20: + deletever=flowver[-1].name + versions.delete_version(version_id=deletever) + logging.info( + "deleted version id %s in project %s", + deletever, + self.target_project_id + ) + + def version_cut(self): + """ + 1. Cut a version of those flows + 2. 
Storing the new version ids created
+        """
+        versions=Versions()
+        vers=[]
+        for flow_id in self.flow_ids:
+            v_display_name=f"version cut by CI/CD {datetime.datetime.now()}"
+            ver=versions.create_version(
+                flow_id=flow_id,
+                description=self.updated_commit_message,
+                display_name=v_display_name
+            )
+            vers.append(ver)
+
+        # store the new version ids created
+        new_versions=[]
+        for ver in vers:
+            verresult=ver.result()
+            versionid=verresult.name
+            new_versions.append(versionid)
+        self.new_versions=new_versions
+        logging.info("versions cut in %s project",self.target_project_id)
+
+    def deploy_versions(self):
+        """
+        1. Get the target environment id
+        2. Deploy the newly created versions to that environment
+        """
+        env=Environments()
+        # get env id
+        env_details=env.get_environment_by_display_name(
+            display_name=self.target_environment_name,
+            agent_id=self.target_agent_id
+        )
+        self.target_env_id=env_details.name
+
+        # deploy the versions created to this env id
+
+        for new_version in self.new_versions:
+            env.deploy_flow_to_environment(
+                environment_id=self.target_env_id,
+                flow_version=new_version)
+
+        logging.info("versions deployed to the target env in %s project",
+                     self.target_project_id
+        )
+
+    def dev_prod_sync(self):
+        """
+        Sync the dev and prod projects once deployment happens in prod:
+        1. Get the dev sync environment id
+        2. Deploy the impacted versions to that environment
+        """
+        agent=Agents()
+        dev_agent_details=agent.get_agent_by_display_name(
+            display_name=self.agent_name,
+            project_id=self.dev_project
+        )
+
+        dev_agent_id=dev_agent_details.name
+        env=Environments()
+        # get env id
+        env_details=env.get_environment_by_display_name(
+            display_name=self.devprodsyncenv,
+            agent_id=dev_agent_id
+        )
+        self.devprod_env_id=env_details.name
+
+        # deploy the versions created to this env id
+
+        for new_version in self.impacted_version_ids:
+            env.deploy_flow_to_environment(
+                environment_id=self.devprod_env_id,
+                flow_version=new_version)
+
+        logging.info("flows deployed in prod are synced with the dev environment")
+
+    def datastore_update(self,projectlevel):
+        """
+        Update the datastore (gen app builder engine) id on the agent.
+        """
+        if projectlevel=="uat":
+            engine_id=self.uat_engine_id
+        elif projectlevel=="prod":
+            engine_id=self.prod_engine_id
+        else:
+            engine_id=""
+        agents=Agents()
+        app=types.Agent.GenAppBuilderSettings(engine=engine_id)
+        kwargs={"gen_app_builder_settings":app}
+        agents.update_agent(agent_id=self.target_agent_id,**kwargs)
+
+        logging.info("datastore id updated")
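+    # Example (hypothetical values): prod/deploy.py calls
+    #   dep.datastore_update("prod")
+    # which points the agent's gen app builder settings at the
+    # prod_engine_id from config.json; "uat" uses uat_engine_id.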
+    def fullfillment_lang_check(self,lang):
+        """Checks fulfillment language coverage compared to English.
+
+        This method compares the fulfillment coverage of the specified language
+        (`lang`) with the English language ('en') for the given agent and flows.
+        It returns dataframes containing statistics on fulfillment entries, parameters,
+        and routes, along with a boolean result indicating whether all elements have
+        agent responses in the specified language.
+
+        Args:
+            lang: The language code to compare against English (e.g., 'fr-ca').
+
+        Returns:
+            A tuple containing:
+            - entry_df: DataFrame with statistics on entry fulfillment coverage.
+            - param_df: DataFrame with statistics on parameter fulfillment coverage.
+            - route_df: DataFrame with statistics on route fulfillment coverage.
+            - result: A boolean indicating if all elements have agent responses
+              in the specified language.
+        """
+
+        entry_df,param_df,route_df,result= en_vs_lang(
+            self.target_agent_id,
+            self.source_flow_names,
+            lang
+        )
+        return entry_df,param_df,route_df,result
diff --git a/examples/dfcx_agent_cicd/cicd_code/shared/en_vs_other_lang.py b/examples/dfcx_agent_cicd/cicd_code/shared/en_vs_other_lang.py
new file mode 100644
index 00000000..7577507f
--- /dev/null
+++ b/examples/dfcx_agent_cicd/cicd_code/shared/en_vs_other_lang.py
@@ -0,0 +1,132 @@
+""" Compare the fulfillments of en vs other languages such as French"""
+from dfcx_scrapi.core.flows import Flows
+
+import pandas as pd
+
+from .fullfillment_helper import get_entry_ff,get_param_ff,get_route_ff
+from .fullfillment_helper import PagesChild
+
+def en_vs_lang(agent_id,flows,lang):
+    """Compares fulfillment coverage between English and a specified language.
+
+    This function analyzes the fulfillment configurations (entry fulfillments,
+    parameter fulfillments, and route fulfillments) for a given Dialogflow CX agent
+    and a set of flows. It compares the coverage of the specified language (`lang`)
+    with the English language ('en'), generating dataframes that highlight any
+    discrepancies in fulfillment setup.
+
+    Args:
+        agent_id: The ID of the Dialogflow CX agent.
+        flows: A list of flow display names to analyze.
+        lang: The language code to compare against English (e.g., 'fr-ca').
+
+    Returns:
+        A tuple containing:
+        - entry_df: DataFrame with statistics on entry fulfillment coverage.
+        - param_df: DataFrame with statistics on parameter fulfillment coverage.
+        - route_df: DataFrame with statistics on route fulfillment coverage.
+        - result: A boolean indicating if all elements have agent responses
+          in the specified language.
+    """
+    entry_columns = ['flow','page', 'text_entry_en', f'text_entry_{lang}',
+                     'payload_entry_en', f'payload_entry_{lang}']
+    entry_df = pd.DataFrame(columns=entry_columns)
+    params_columns =['flow','page','parameter','text_param_en',
+                     f'text_param_{lang}','payload_param_en',
+                     f'payload_param_{lang}']
+    param_df = pd.DataFrame(columns=params_columns)
+    route_columns=['flow','page','route','text_route_en',
+                   f'text_route_{lang}', 'payload_route_en',
+                   f'payload_route_{lang}']
+    route_df = pd.DataFrame(columns=route_columns)
+    flowobj=Flows()
+    pagesobj=PagesChild()
+    for flow in flows:
+        flow_details=flowobj.get_flow_by_display_name(display_name=flow,
+                                                      agent_id=agent_id)
+        flow_id=flow_details.name
+        pages_list=pagesobj.list_pages(flow_id=flow_id)
+
+        for page in pages_list:
+            page_name=page.display_name
+            # get entry fulfillment details
+            p_entry_en,t_entry_en=get_entry_ff(page=page,language_code='en')
+
+            if p_entry_en >0 or t_entry_en >0:
+                p_entry_lang,t_entry_lang=get_entry_ff(
+                    page_id=page.name,
+                    language_code=lang)
+                new_row = pd.DataFrame({
+                    'flow': [flow],
+                    'page': [page_name],
+                    'text_entry_en':[t_entry_en] ,
+                    f'text_entry_{lang}': [t_entry_lang],
+                    'payload_entry_en':[p_entry_en],
+                    f'payload_entry_{lang}': [p_entry_lang]
+                })
+                entry_df = pd.concat([entry_df, new_row], ignore_index=True)
+
+            # get fulfillments in parameters
+            for idx,param in enumerate(page.form.parameters):
+                param_name=param.display_name
+                p_param_en,t_param_en=get_param_ff(param=param,language_code='en')
+                if p_param_en> 0 or t_param_en >0:
+                    p_param_lang,t_param_lang=get_param_ff(page_id=page.name,
+                                                           idx=idx,
+                                                           language_code=lang)
+
+                    new_row = pd.DataFrame({
+                        'flow': [flow],
+                        'page': [page_name],
+                        'parameter' : [param_name],
+                        'text_param_en':[t_param_en] ,
+                        f'text_param_{lang}': [t_param_lang],
+                        'payload_param_en':[p_param_en],
+                        f'payload_param_{lang}': [p_param_lang]
+                    })
+                    param_df = pd.concat([param_df, new_row], ignore_index=True)
+
+            # get fulfillment details in page routes
+            for idx,route in enumerate(page.transition_routes):
+                route_name=route.name
+                p_route_en,t_route_en=get_route_ff(route=route,language_code='en')
+                if p_route_en>0 or t_route_en>0:
+                    p_route_lang,t_route_lang=get_route_ff(page_id=page.name,
+                                                           idx=idx,
+                                                           language_code=lang)
+
+                    new_row = pd.DataFrame({
+                        'flow': [flow],
+                        'page': [page_name],
+                        'route' : [route_name],
+                        'text_route_en':[t_route_en] ,
+                        f'text_route_{lang}': [t_route_lang],
+                        'payload_route_en':[p_route_en],
+                        f'payload_route_{lang}': [p_route_lang]
+                    })
+                    route_df=pd.concat([route_df, new_row],
+                                       ignore_index=True)
+    condition1 = (
+        (entry_df.iloc[:, 2] != entry_df.iloc[:, 3]) |
+        (entry_df.iloc[:, 4] != entry_df.iloc[:, 5])
+    )
+    condition2 = (
+        (param_df.iloc[:, 3] != param_df.iloc[:, 4]) |
+        (param_df.iloc[:, 5] != param_df.iloc[:, 6])
+    )
+    condition3 =(
+        (route_df.iloc[:, 3] != route_df.iloc[:, 4]) |
+        (route_df.iloc[:, 5] != route_df.iloc[:, 6])
+    )
+
+    result1 = entry_df[condition1]
+    result2 = param_df[condition2]
+    result3 = route_df[condition3]
+    if result1.empty and result2.empty and result3.empty:
+        result=True
+    else:
+        result=False
+
+    return entry_df,param_df,route_df,result
diff --git a/examples/dfcx_agent_cicd/cicd_code/shared/fullfillment_helper.py b/examples/dfcx_agent_cicd/cicd_code/shared/fullfillment_helper.py
new file mode 100644
index 00000000..6b70836f
--- /dev/null
+++ b/examples/dfcx_agent_cicd/cicd_code/shared/fullfillment_helper.py
@@ -0,0 +1,111 @@
+""" Helper functions for comparing en vs other languages"""
+
+from dfcx_scrapi.core.pages import Pages
+
+from google.cloud.dialogflowcx_v3beta1.services import pages
+from google.cloud.dialogflowcx_v3beta1.types import page as gcdc_page
+
+class PagesChild(Pages):
+    """
+    Extends the Pages object to fetch fulfillment details per language.
+    """
+    def __init__(self,*args,**kwargs):
+        super().__init__(*args,**kwargs)
+
+    def get_page(self, page_id,language_code) -> gcdc_page.Page:
+        """Get a single CX Page object based on the provided Page ID.
+
+
+def get_entry_ff(page=None, page_id=None, language_code='en'):
+    """
+    Returns entry fulfillment stats (payload count, text count)
+    """
+    if not page:
+        pagesobj = PagesChild()
+        page = pagesobj.get_page(page_id=page_id, language_code=language_code)
+
+    payloadc = 0
+    textc = 0
+    for i in page.entry_fulfillment.messages:
+        try:
+            payloadc += len(i.payload.items())
+        except Exception:
+            pass
+        try:
+            textc += len(i.text.text)
+        except Exception:
+            pass
+
+    return payloadc, textc
+
+def get_param_ff(param=None, page_id=None, idx=None, language_code='en'):
+    """
+    Returns parameter fulfillment stats (payload count, text count)
+    """
+    if not param:
+        pagesobj = PagesChild()
+        page = pagesobj.get_page(page_id=page_id, language_code=language_code)
+        param = page.form.parameters[idx]
+    payloadc = 0
+    textc = 0
+    for message in param.fill_behavior.initial_prompt_fulfillment.messages:
+        try:
+            payloadc += len(message.payload.items())
+        except Exception:
+            pass
+        try:
+            textc += len(message.text.text)
+        except Exception:
+            pass
+
+    return payloadc, textc
+
+def get_route_ff(route=None, page_id=None, idx=None, language_code='en'):
+    """
+    Returns route fulfillment stats (payload count, text count)
+    """
+    if not route:
+        pagesobj = PagesChild()
+        page = pagesobj.get_page(page_id=page_id, language_code=language_code)
+        route = page.transition_routes[idx]
+    payloadc = 0
+    textc = 0
+    for i in route.trigger_fulfillment.messages:
+        try:
+            payloadc += len(i.payload.items())
+        except Exception:
+            pass
+        try:
+            textc += len(i.text.text)
+        except Exception:
+            pass
+
+    return payloadc, textc
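+
+# A minimal sketch of how the counters are typically combined (hypothetical
+# page object; mirrors the calls made from en_vs_other_lang.py). A (0, 0)
+# result for a non-English language on a page that has English fulfillments
+# is what flags a coverage gap:
+#   payloads, texts = get_entry_ff(page_id=page.name, language_code="fr-ca")
+#   if payloads == 0 and texts == 0:
+#       ...  # page has no fr-ca entry fulfillment yet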
+ """ + + def __init__( + self,project_id, + agent_name, + environment_name + ): + + self.project_id=project_id + self.agent_name=agent_name + self.environment_name=environment_name + + agents=Agents() + env=Environments() + + agent_details=agents.get_agent_by_display_name( + display_name=self.agent_name, + project_id=self.project_id) + + self.agent_id=agent_details.name + + #get environment id + if self.environment_name: + env_details=env.get_environment_by_display_name( + display_name=self.environment_name, + agent_id=self.agent_id) + self.env_id=env_details.name + else: + self.env_id=None + + def trigger_test_case( + self, + tags, + agent_id=None, + env_id=None) -> Tuple[Dict[str, int], bool] : + """ + Function to trigger the test case module in dfcx + Returns: + Result: Dict of results + boolean mentioning test case status + """ + if not agent_id: + agent_id=self.agent_id + if not env_id: + env_id=self.env_id + tc=TestCases() + tc_list=tc.list_test_cases(agent_id=agent_id) + + #get test cases + try: + filtered_tc = [ + testcase + for testcase in tc_list + if any( + tag in testcase + for tag in tags + ) + ] + + except AttributeError as e: + print( + f"Test case not found to run due to error {e}. " + "UAT deployment will be done without test case validation" + ) + result={"Pass": 0, "Fail": 0} + return result, True + filtered_tc_id=[filtestcase.name for filtestcase in filtered_tc] + print(filtered_tc_id) + + #run the test cases + tc_result=tc.batch_run_test_cases(test_cases=filtered_tc_id, + agent_id=agent_id, + environment=env_id) + print(f"test case results {tc_result}") + + pass_count=0 + fail_count=0 + for result in tc_result.results: + if result.test_result==1: + pass_count+=1 + else: + fail_count+=1 + + print(f"Pass: {pass_count}, Fail: {fail_count}") + result={"Pass": pass_count, "Fail": fail_count} + + if fail_count>0: + return result,False + return result,True diff --git a/examples/dfcx_agent_cicd/cicd_code/shared/webhook_update.py b/examples/dfcx_agent_cicd/cicd_code/shared/webhook_update.py new file mode 100644 index 00000000..1f9feba2 --- /dev/null +++ b/examples/dfcx_agent_cicd/cicd_code/shared/webhook_update.py @@ -0,0 +1,31 @@ +""" Functions to update the webhook env""" +import logging +import re + +from dfcx_scrapi.core.webhooks import Webhooks + + +# logging config +logging.basicConfig( + level=logging.INFO, + format="%(asctime)s %(levelname)-8s %(message)s", + datefmt="%Y-%m-%d %H:%M:%S", +) +web=Webhooks() + +def update_webhook(agent_id,env): + """ + Updates the environment portion in the apigee webhook end point + """ + weblist=web.list_webhooks(agent_id=agent_id) + logging.info("got the webhooklist") + + for webhook in weblist: + currenturi=webhook.generic_web_service.uri + pattern = re.compile(r"\bdev\b") + updateduri=re.sub(pattern, env, currenturi) + webhook.generic_web_service.uri=updateduri + kwargs={"generic_web_service":webhook.generic_web_service} + web.update_webhook(webhook_id=webhook.name, + webhook_obj=webhook,**kwargs) + logging.info("replaced dev to %s and updated all the webhook urls",env)