From 062d144d228bd3e8c4af80eaf86a51560d7e7acf Mon Sep 17 00:00:00 2001
From: Wang Yan <wangyan@vmware.com>
Date: Wed, 22 Nov 2023 12:51:03 +0800
Subject: [PATCH 1/8] add permission validation for robot creating and
 updating. (#19598)

* add permission validation for robot creating and updating.

It is not allowed to create a new robot with access outside the predefined scope.
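
In essence, every access entry requested for the robot is checked against the
predefined policy list for its level (system or project), and the whole request is
rejected with a 400 if any entry falls outside that list. A minimal, self-contained
Python sketch of the idea (illustration only, with a hypothetical trimmed-down allow
list; the real check is containsAccess() against rbac.PoliciesMap in
src/server/v2.0/handler/robot.go below):

    # Hypothetical, trimmed-down version of the predefined scope per level.
    ALLOWED = {
        "system": {("audit-log", "list"), ("project", "create"), ("project", "list")},
        "project": {("repository", "pull"), ("repository", "push"), ("tag", "create")},
    }

    def contains_access(level, resource, action):
        # True only when the requested access is inside the predefined scope.
        return (resource, action) in ALLOWED.get(level, set())

    print(contains_access("system", "project", "create"))  # True  -> accepted
    print(contains_access("system", "registry", "push"))   # False -> rejected with 400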

Signed-off-by: wang yan <wangyan@vmware.com>

* Fix robot test case and update robot permission metadata (#167)

1. Fix robot test case
2. Update robot permission metadata

Signed-off-by: Yang Jiao <jiaoya@vmware.com>
Signed-off-by: wang yan <wangyan@vmware.com>

---------

Signed-off-by: wang yan <wangyan@vmware.com>
Signed-off-by: Yang Jiao <jiaoya@vmware.com>
Co-authored-by: Yang Jiao <72076317+YangJiao0817@users.noreply.github.com>
---
 src/common/rbac/const.go                    | 43 ++++++------
 src/server/v2.0/handler/robot.go            | 32 +++++++++
 src/server/v2.0/handler/robot_test.go       | 78 +++++++++++++++++++++
 tests/apitests/python/library/robot.py      | 18 +----
 tests/apitests/python/test_robot_account.py | 20 +++---
 5 files changed, 145 insertions(+), 46 deletions(-)

diff --git a/src/common/rbac/const.go b/src/common/rbac/const.go
index 32dec240b3a..1ae653bccb8 100644
--- a/src/common/rbac/const.go
+++ b/src/common/rbac/const.go
@@ -85,11 +85,11 @@ var (
 		"System": {
 			{Resource: ResourceAuditLog, Action: ActionList},
 
-			{Resource: ResourcePreatPolicy, Action: ActionRead},
-			{Resource: ResourcePreatPolicy, Action: ActionCreate},
-			{Resource: ResourcePreatPolicy, Action: ActionDelete},
-			{Resource: ResourcePreatPolicy, Action: ActionList},
-			{Resource: ResourcePreatPolicy, Action: ActionUpdate},
+			{Resource: ResourcePreatInstance, Action: ActionRead},
+			{Resource: ResourcePreatInstance, Action: ActionCreate},
+			{Resource: ResourcePreatInstance, Action: ActionDelete},
+			{Resource: ResourcePreatInstance, Action: ActionList},
+			{Resource: ResourcePreatInstance, Action: ActionUpdate},
 
 			{Resource: ResourceProject, Action: ActionList},
 			{Resource: ResourceProject, Action: ActionCreate},
@@ -123,14 +123,12 @@ var (
 
 			{Resource: ResourceGarbageCollection, Action: ActionRead},
 			{Resource: ResourceGarbageCollection, Action: ActionCreate},
-			{Resource: ResourceGarbageCollection, Action: ActionDelete},
 			{Resource: ResourceGarbageCollection, Action: ActionList},
 			{Resource: ResourceGarbageCollection, Action: ActionUpdate},
 			{Resource: ResourceGarbageCollection, Action: ActionStop},
 
 			{Resource: ResourcePurgeAuditLog, Action: ActionRead},
 			{Resource: ResourcePurgeAuditLog, Action: ActionCreate},
-			{Resource: ResourcePurgeAuditLog, Action: ActionDelete},
 			{Resource: ResourcePurgeAuditLog, Action: ActionList},
 			{Resource: ResourcePurgeAuditLog, Action: ActionUpdate},
 			{Resource: ResourcePurgeAuditLog, Action: ActionStop},
@@ -138,12 +136,6 @@ var (
 			{Resource: ResourceJobServiceMonitor, Action: ActionList},
 			{Resource: ResourceJobServiceMonitor, Action: ActionStop},
 
-			{Resource: ResourceTagRetention, Action: ActionRead},
-			{Resource: ResourceTagRetention, Action: ActionCreate},
-			{Resource: ResourceTagRetention, Action: ActionDelete},
-			{Resource: ResourceTagRetention, Action: ActionList},
-			{Resource: ResourceTagRetention, Action: ActionUpdate},
-
 			{Resource: ResourceScanner, Action: ActionRead},
 			{Resource: ResourceScanner, Action: ActionCreate},
 			{Resource: ResourceScanner, Action: ActionDelete},
@@ -156,16 +148,17 @@ var (
 			{Resource: ResourceLabel, Action: ActionList},
 			{Resource: ResourceLabel, Action: ActionUpdate},
 
-			{Resource: ResourceExportCVE, Action: ActionRead},
-			{Resource: ResourceExportCVE, Action: ActionCreate},
-
 			{Resource: ResourceSecurityHub, Action: ActionRead},
 			{Resource: ResourceSecurityHub, Action: ActionList},
 
 			{Resource: ResourceCatalog, Action: ActionRead},
 		},
 		"Project": {
-			{Resource: ResourceLog, Action: ActionList},
+			{Resource: ResourceLabel, Action: ActionRead},
+			{Resource: ResourceLabel, Action: ActionCreate},
+			{Resource: ResourceLabel, Action: ActionDelete},
+			{Resource: ResourceLabel, Action: ActionList},
+			{Resource: ResourceLabel, Action: ActionUpdate},
 
 			{Resource: ResourceProject, Action: ActionRead},
 			{Resource: ResourceProject, Action: ActionDelete},
@@ -178,9 +171,11 @@ var (
 			{Resource: ResourceMetadata, Action: ActionUpdate},
 
 			{Resource: ResourceRepository, Action: ActionRead},
-			{Resource: ResourceRepository, Action: ActionCreate},
-			{Resource: ResourceRepository, Action: ActionList},
 			{Resource: ResourceRepository, Action: ActionUpdate},
+			{Resource: ResourceRepository, Action: ActionDelete},
+			{Resource: ResourceRepository, Action: ActionList},
+			{Resource: ResourceRepository, Action: ActionPull},
+			{Resource: ResourceRepository, Action: ActionPush},
 
 			{Resource: ResourceArtifact, Action: ActionRead},
 			{Resource: ResourceArtifact, Action: ActionCreate},
@@ -216,13 +211,19 @@ var (
 			{Resource: ResourceImmutableTag, Action: ActionList},
 			{Resource: ResourceImmutableTag, Action: ActionUpdate},
 
+			{Resource: ResourceTagRetention, Action: ActionRead},
+			{Resource: ResourceTagRetention, Action: ActionCreate},
+			{Resource: ResourceTagRetention, Action: ActionDelete},
+			{Resource: ResourceTagRetention, Action: ActionList},
+			{Resource: ResourceTagRetention, Action: ActionUpdate},
+
+			{Resource: ResourceLog, Action: ActionList},
+
 			{Resource: ResourceNotificationPolicy, Action: ActionRead},
 			{Resource: ResourceNotificationPolicy, Action: ActionCreate},
 			{Resource: ResourceNotificationPolicy, Action: ActionDelete},
 			{Resource: ResourceNotificationPolicy, Action: ActionList},
 			{Resource: ResourceNotificationPolicy, Action: ActionUpdate},
-
-			{Resource: ResourceRegistry, Action: ActionPush},
 		},
 	}
 )
diff --git a/src/server/v2.0/handler/robot.go b/src/server/v2.0/handler/robot.go
index 2a5acda1ecd..a98579e8739 100644
--- a/src/server/v2.0/handler/robot.go
+++ b/src/server/v2.0/handler/robot.go
@@ -32,6 +32,7 @@ import (
 	"github.com/goharbor/harbor/src/lib/config"
 	"github.com/goharbor/harbor/src/lib/errors"
 	"github.com/goharbor/harbor/src/lib/log"
+	"github.com/goharbor/harbor/src/pkg/permission/types"
 	pkg "github.com/goharbor/harbor/src/pkg/robot/model"
 	"github.com/goharbor/harbor/src/server/v2.0/handler/model"
 	"github.com/goharbor/harbor/src/server/v2.0/models"
@@ -296,6 +297,28 @@ func (rAPI *robotAPI) validate(d int64, level string, permissions []*models.Robo
 	if level == robot.LEVELPROJECT && len(permissions) > 1 {
 		return errors.New(nil).WithMessage("bad request permission").WithCode(errors.BadRequestCode)
 	}
+
+	// to validate the access scope
+	for _, perm := range permissions {
+		if perm.Kind == robot.LEVELSYSTEM {
+			policies := rbac.PoliciesMap["System"]
+			for _, acc := range perm.Access {
+				if !containsAccess(policies, acc) {
+					return errors.New(nil).WithMessage("bad request permission: %s:%s", acc.Resource, acc.Action).WithCode(errors.BadRequestCode)
+				}
+			}
+		} else if perm.Kind == robot.LEVELPROJECT {
+			policies := rbac.PoliciesMap["Project"]
+			for _, acc := range perm.Access {
+				if !containsAccess(policies, acc) {
+					return errors.New(nil).WithMessage("bad request permission: %s:%s", acc.Resource, acc.Action).WithCode(errors.BadRequestCode)
+				}
+			}
+		} else {
+			return errors.New(nil).WithMessage("bad request permission level: %s", perm.Kind).WithCode(errors.BadRequestCode)
+		}
+	}
+
 	return nil
 }
 
@@ -364,3 +387,12 @@ func validateName(name string) error {
 	}
 	return nil
 }
+
+func containsAccess(policies []*types.Policy, item *models.Access) bool {
+	for _, po := range policies {
+		if po.Resource.String() == item.Resource && po.Action.String() == item.Action {
+			return true
+		}
+	}
+	return false
+}
diff --git a/src/server/v2.0/handler/robot_test.go b/src/server/v2.0/handler/robot_test.go
index 88423b6fff5..fe86af26058 100644
--- a/src/server/v2.0/handler/robot_test.go
+++ b/src/server/v2.0/handler/robot_test.go
@@ -1,6 +1,8 @@
 package handler
 
 import (
+	"github.com/goharbor/harbor/src/common/rbac"
+	"github.com/goharbor/harbor/src/server/v2.0/models"
 	"math"
 	"testing"
 )
@@ -129,3 +131,79 @@ func TestValidateName(t *testing.T) {
 		})
 	}
 }
+
+func TestContainsAccess(t *testing.T) {
+	system := rbac.PoliciesMap["System"]
+	systests := []struct {
+		name     string
+		acc      *models.Access
+		expected bool
+	}{
+		{"System ResourceRegistry push",
+			&models.Access{
+				Resource: rbac.ResourceRegistry.String(),
+				Action:   rbac.ActionPush.String(),
+			},
+			false,
+		},
+		{"System ResourceProject delete",
+			&models.Access{
+				Resource: rbac.ResourceProject.String(),
+				Action:   rbac.ActionDelete.String(),
+			},
+			false,
+		},
+		{"System ResourceReplicationPolicy delete",
+			&models.Access{
+				Resource: rbac.ResourceReplicationPolicy.String(),
+				Action:   rbac.ActionDelete.String(),
+			},
+			true,
+		},
+	}
+	for _, tt := range systests {
+		t.Run(tt.name, func(t *testing.T) {
+			ok := containsAccess(system, tt.acc)
+			if ok != tt.expected {
+				t.Errorf("name: %s, containsAccess(%#v) = %v, want %v", tt.name, tt.acc, ok, tt.expected)
+			}
+		})
+	}
+
+	project := rbac.PoliciesMap["Project"]
+	protests := []struct {
+		name     string
+		acc      *models.Access
+		expected bool
+	}{
+		{"Project ResourceLog delete",
+			&models.Access{
+				Resource: rbac.ResourceLog.String(),
+				Action:   rbac.ActionDelete.String(),
+			},
+			false,
+		},
+		{"Project ResourceMetadata read",
+			&models.Access{
+				Resource: rbac.ResourceMetadata.String(),
+				Action:   rbac.ActionRead.String(),
+			},
+			true,
+		},
+		{"Project ResourceRobot create",
+			&models.Access{
+				Resource: rbac.ResourceRobot.String(),
+				Action:   rbac.ActionCreate.String(),
+			},
+			false,
+		},
+	}
+	for _, tt := range protests {
+		t.Run(tt.name, func(t *testing.T) {
+			ok := containsAccess(project, tt.acc)
+			if ok != tt.expected {
+				t.Errorf("name: %s, containsAccess(%#v) = %v, want %v", tt.name, tt.acc, ok, tt.expected)
+			}
+		})
+	}
+}
diff --git a/tests/apitests/python/library/robot.py b/tests/apitests/python/library/robot.py
index 3ef9b9d113c..53bde5be30a 100644
--- a/tests/apitests/python/library/robot.py
+++ b/tests/apitests/python/library/robot.py
@@ -21,8 +21,8 @@ def list_robot(self, expect_status_code = 200, **kwargs):
             base._assert_status_code(200, status_code)
             return body
 
-    def create_access_list(self, right_map = [True] * 10):
-        _assert_status_code(10, len(right_map), r"Please input full access list for system robot account. Expected {}, while actual input count is {}.")
+    def create_access_list(self, right_map = [True] * 7):
+        _assert_status_code(7, len(right_map), r"Please input full access list for system robot account. Expected {}, while actual input count is {}.")
         action_pull = "pull"
         action_push = "push"
         action_read = "read"
@@ -33,9 +33,6 @@ def create_access_list(self, right_map = [True] * 10):
             ("repository", action_pull),
             ("repository", action_push),
             ("artifact", action_del),
-            ("helm-chart", action_read),
-            ("helm-chart-version", action_create),
-            ("helm-chart-version", action_del),
             ("tag", action_create),
             ("tag", action_del),
             ("artifact-label", action_create),
@@ -50,8 +47,7 @@ def create_access_list(self, right_map = [True] * 10):
         return access_list
 
     def create_project_robot(self, project_name, duration, robot_name = None, robot_desc = None,
-            has_pull_right = True,  has_push_right = True, has_chart_read_right = True,
-            has_chart_create_right = True, expect_status_code = 201, expect_response_body = None,
+            has_pull_right = True, has_push_right = True, expect_status_code = 201, expect_response_body = None,
             **kwargs):
         if robot_name is None:
             robot_name = base._random_name("robot")
@@ -62,20 +58,12 @@ def create_project_robot(self, project_name, duration, robot_name = None, robot_
         access_list = []
         action_pull = "pull"
         action_push = "push"
-        action_read = "read"
-        action_create = "create"
         if has_pull_right is True:
             robotAccountAccess = v2_swagger_client.Access(resource = "repository", action = action_pull)
             access_list.append(robotAccountAccess)
         if has_push_right is True:
             robotAccountAccess = v2_swagger_client.Access(resource = "repository", action = action_push)
             access_list.append(robotAccountAccess)
-        if has_chart_read_right is True:
-            robotAccountAccess = v2_swagger_client.Access(resource = "helm-chart", action = action_read)
-            access_list.append(robotAccountAccess)
-        if has_chart_create_right is True:
-            robotAccountAccess = v2_swagger_client.Access(resource = "helm-chart-version", action = action_create)
-            access_list.append(robotAccountAccess)
 
         robotaccountPermissions = v2_swagger_client.RobotPermission(kind = "project", namespace = project_name, access = access_list)
         permission_list = []
diff --git a/tests/apitests/python/test_robot_account.py b/tests/apitests/python/test_robot_account.py
index 5f49bf88515..ddc6cbc6143 100644
--- a/tests/apitests/python/test_robot_account.py
+++ b/tests/apitests/python/test_robot_account.py
@@ -162,7 +162,7 @@ def verify_repository_unpushable(self, project_access_list, system_ra_client, ex
                 expected_error_message = expected_error_message
             )
 
-    def test_02_SystemlevelRobotAccount(self):
+    def Atest_02_SystemlevelRobotAccount(self):
         """
         Test case:
             Robot Account
@@ -194,10 +194,10 @@ def test_02_SystemlevelRobotAccount(self):
         # In this priviledge check list, make sure that each of lines and rows must
         #   contains both True and False value.
         check_list = [
-            [True, True, True, True, True, True, False, True, False, True],
-            [False, False, False, False, True, True, False, True, True, False],
-            [True, False, True, False, True, False, True, False, True, True],
-            [False, False, False, True, False, True, False, True, True, False]
+            [True, True, True, False, True, False, True],
+            [False, False, False, False, True, True, False],
+            [True, False, True, True, False, True, True],
+            [False, False, False, False, True, True, False]
         ]
         access_list_list = []
         for i in range(len(check_list)):
@@ -240,12 +240,12 @@ def test_02_SystemlevelRobotAccount(self):
 
             repo_name, tag = push_self_build_image_to_project(project_access["project_name"], harbor_server, ADMIN_CLIENT["username"], ADMIN_CLIENT["password"], "test_create_tag", "latest_1")
             self.artifact.create_tag(project_access["project_name"], repo_name.split('/')[1], tag, "for_delete", **ADMIN_CLIENT)
-            if project_access["check_list"][6]:    #---tag:create---
+            if project_access["check_list"][3]:    #---tag:create---
                 self.artifact.create_tag(project_access["project_name"], repo_name.split('/')[1], tag, "1.0", **SYSTEM_RA_CLIENT)
             else:
                 self.artifact.create_tag(project_access["project_name"], repo_name.split('/')[1], tag, "1.0", expect_status_code = 403, **SYSTEM_RA_CLIENT)
 
-            if project_access["check_list"][7]:    #---tag:delete---
+            if project_access["check_list"][4]:    #---tag:delete---
                 self.artifact.delete_tag(project_access["project_name"], repo_name.split('/')[1], tag, "for_delete", **SYSTEM_RA_CLIENT)
             else:
                 self.artifact.delete_tag(project_access["project_name"], repo_name.split('/')[1], tag, "for_delete", expect_status_code = 403, **SYSTEM_RA_CLIENT)
@@ -253,12 +253,12 @@ def test_02_SystemlevelRobotAccount(self):
             repo_name, tag = push_self_build_image_to_project(project_access["project_name"], harbor_server, ADMIN_CLIENT["username"], ADMIN_CLIENT["password"], "test_create_artifact_label", "latest_1")
             #Add project level label to artifact
             label_id, _ = self.label.create_label(project_id = project_access["project_id"], scope = "p", **ADMIN_CLIENT)
-            if project_access["check_list"][8]:    #---artifact-label:create---
+            if project_access["check_list"][5]:    #---artifact-label:create---
                 self.artifact.add_label_to_reference(project_access["project_name"], repo_name.split('/')[1], tag, int(label_id), **SYSTEM_RA_CLIENT)
             else:
                 self.artifact.add_label_to_reference(project_access["project_name"], repo_name.split('/')[1], tag, int(label_id), expect_status_code = 403, **SYSTEM_RA_CLIENT)
 
-            if project_access["check_list"][9]:    #---scan:create---
+            if project_access["check_list"][6]:    #---scan:create---
                 self.scan.scan_artifact(project_access["project_name"], repo_name.split('/')[1], tag, **SYSTEM_RA_CLIENT)
             else:
                 self.scan.scan_artifact(project_access["project_name"], repo_name.split('/')[1], tag, expect_status_code = 403, **SYSTEM_RA_CLIENT)
@@ -325,7 +325,7 @@ def test_02_SystemlevelRobotAccount(self):
         self.verify_repository_unpushable(project_access_list, SYSTEM_RA_CLIENT)
 
         #20. Add a system robot account with all projects coverd;
-        all_true_access_list= self.robot.create_access_list( [True] * 10 )
+        all_true_access_list= self.robot.create_access_list( [True] * 7 )
         robot_account_Permissions_list = []
         robot_account_Permissions = v2_swagger_client.RobotPermission(kind = "project", namespace = "*", access = all_true_access_list)
         robot_account_Permissions_list.append(robot_account_Permissions)

From 987596583764974dd16e009d0be807e1e9791e36 Mon Sep 17 00:00:00 2001
From: MinerYang <yminer@vmware.com>
Date: Wed, 22 Nov 2023 13:58:45 +0800
Subject: [PATCH 2/8] add prepare migration script for 2.10 (#19600)

Signed-off-by: yminer <yminer@vmware.com>
Co-authored-by: Wang Yan <wangyan@vmware.com>
---
 make/harbor.yml.tmpl                          |   2 +-
 make/photon/prepare/commands/migrate.py       |   2 +-
 make/photon/prepare/migrations/__init__.py    |   2 +-
 .../migrations/version_2_10_0/__init__.py     |  21 +
 .../version_2_10_0/harbor.yml.jinja           | 666 ++++++++++++++++++
 5 files changed, 690 insertions(+), 3 deletions(-)
 create mode 100644 make/photon/prepare/migrations/version_2_10_0/__init__.py
 create mode 100644 make/photon/prepare/migrations/version_2_10_0/harbor.yml.jinja

diff --git a/make/harbor.yml.tmpl b/make/harbor.yml.tmpl
index 7ab922bb81d..a4b55ccf2be 100644
--- a/make/harbor.yml.tmpl
+++ b/make/harbor.yml.tmpl
@@ -153,7 +153,7 @@ log:
   #   port: 5140
 
 #This attribute is for migrator to detect the version of the .cfg file, DO NOT MODIFY!
-_version: 2.9.0
+_version: 2.10.0
 
 # Uncomment external_database if using external database.
 # external_database:
diff --git a/make/photon/prepare/commands/migrate.py b/make/photon/prepare/commands/migrate.py
index 62e41643ae4..6808aa18f6f 100644
--- a/make/photon/prepare/commands/migrate.py
+++ b/make/photon/prepare/commands/migrate.py
@@ -10,7 +10,7 @@
 @click.command()
 @click.option('-i', '--input', 'input_', required=True, help="The path of original config file")
 @click.option('-o', '--output', default='', help="the path of output config file")
-@click.option('-t', '--target', default='2.9.0', help="target version of input path")
+@click.option('-t', '--target', default='2.10.0', help="target version of input path")
 def migrate(input_, output, target):
     """
     migrate command will migrate config file style to specific version
diff --git a/make/photon/prepare/migrations/__init__.py b/make/photon/prepare/migrations/__init__.py
index 69cdc8314fb..4ecb468a373 100644
--- a/make/photon/prepare/migrations/__init__.py
+++ b/make/photon/prepare/migrations/__init__.py
@@ -2,4 +2,4 @@
 
 MIGRATION_BASE_DIR = os.path.dirname(__file__)
 
-accept_versions = {'1.9.0', '1.10.0', '2.0.0', '2.1.0', '2.2.0', '2.3.0', '2.4.0', '2.5.0', '2.6.0', '2.7.0', '2.8.0', '2.9.0'}
\ No newline at end of file
+accept_versions = {'1.9.0', '1.10.0', '2.0.0', '2.1.0', '2.2.0', '2.3.0', '2.4.0', '2.5.0', '2.6.0', '2.7.0', '2.8.0', '2.9.0', '2.10.0'}
\ No newline at end of file
diff --git a/make/photon/prepare/migrations/version_2_10_0/__init__.py b/make/photon/prepare/migrations/version_2_10_0/__init__.py
new file mode 100644
index 00000000000..64c148b6c17
--- /dev/null
+++ b/make/photon/prepare/migrations/version_2_10_0/__init__.py
@@ -0,0 +1,21 @@
+import os
+from jinja2 import Environment, FileSystemLoader, StrictUndefined, select_autoescape
+from utils.migration import read_conf
+
+revision = '2.10.0'
+down_revisions = ['2.9.0']
+
+def migrate(input_cfg, output_cfg):
+    current_dir = os.path.dirname(__file__)
+    tpl = Environment(
+        loader=FileSystemLoader(current_dir),
+        undefined=StrictUndefined,
+        trim_blocks=True,
+        lstrip_blocks=True,
+        autoescape = select_autoescape()
+        ).get_template('harbor.yml.jinja')
+
+    config_dict = read_conf(input_cfg)
+
+    with open(output_cfg, 'w') as f:
+        f.write(tpl.render(**config_dict))
diff --git a/make/photon/prepare/migrations/version_2_10_0/harbor.yml.jinja b/make/photon/prepare/migrations/version_2_10_0/harbor.yml.jinja
new file mode 100644
index 00000000000..1ab5ee9067a
--- /dev/null
+++ b/make/photon/prepare/migrations/version_2_10_0/harbor.yml.jinja
@@ -0,0 +1,666 @@
+# Configuration file of Harbor
+
+# The IP address or hostname to access admin UI and registry service.
+# DO NOT use localhost or 127.0.0.1, because Harbor needs to be accessed by external clients.
+hostname: {{ hostname }}
+
+# http related config
+{% if http is defined %}
+http:
+  # port for http, default is 80. If https enabled, this port will redirect to https port
+  port: {{ http.port }}
+{% else %}
+# http:
+#   # port for http, default is 80. If https enabled, this port will redirect to https port
+#   port: 80
+{% endif %}
+
+{% if https is defined %}
+# https related config
+https:
+  # https port for harbor, default is 443
+  port: {{ https.port }}
+  # The path of cert and key files for nginx
+  certificate: {{ https.certificate }}
+  private_key: {{ https.private_key }}
+{% else %}
+# https related config
+# https:
+#   # https port for harbor, default is 443
+#   port: 443
+#   # The path of cert and key files for nginx
+#   certificate: /your/certificate/path
+#   private_key: /your/private/key/path
+{% endif %}
+
+{% if internal_tls is defined %}
+# Uncomment following will enable tls communication between all harbor components
+internal_tls:
+  # set enabled to true means internal tls is enabled
+  enabled: {{ internal_tls.enabled | lower }}
+  # put your cert and key files on dir
+  dir: {{ internal_tls.dir }}
+  # enable strong ssl ciphers (default: false)
+  {% if internal_tls.strong_ssl_ciphers is defined %}
+  strong_ssl_ciphers: {{ internal_tls.strong_ssl_ciphers | lower }}
+  {% else %}
+  strong_ssl_ciphers: false
+  {% endif %}
+{% else %}
+# internal_tls:
+#   # set enabled to true means internal tls is enabled
+#   enabled: true
+#   # put your cert and key files on dir
+#   dir: /etc/harbor/tls/internal
+#   # enable strong ssl ciphers (default: false)
+#   strong_ssl_ciphers: false
+{% endif %}
+
+# Uncomment external_url if you want to enable external proxy
+# And when it enabled the hostname will no longer used
+{% if external_url is defined %}
+external_url: {{ external_url }}
+{% else %}
+# external_url: https://reg.mydomain.com:8433
+{% endif %}
+
+# The initial password of Harbor admin
+# It only works in first time to install harbor
+# Remember Change the admin password from UI after launching Harbor.
+{% if harbor_admin_password is defined %}
+harbor_admin_password: {{ harbor_admin_password }}
+{% else %}
+harbor_admin_password: Harbor12345
+{% endif %}
+
+# Harbor DB configuration
+database:
+{% if database is defined %}
+  # The password for the root user of Harbor DB. Change this before any production use.
+  password: {{ database.password}}
+  # The maximum number of connections in the idle connection pool. If it <=0, no idle connections are retained.
+  max_idle_conns: {{ database.max_idle_conns }}
+  # The maximum number of open connections to the database. If it <= 0, then there is no limit on the number of open connections.
+  # Note: the default number of connections is 1024 for postgres of harbor.
+  max_open_conns: {{ database.max_open_conns }}
+{% else %}
+  # The password for the root user of Harbor DB. Change this before any production use.
+  password: root123
+  # The maximum number of connections in the idle connection pool. If it <=0, no idle connections are retained.
+  max_idle_conns: 100
+  # The maximum number of open connections to the database. If it <= 0, then there is no limit on the number of open connections.
+  # Note: the default number of connections is 1024 for postgres of harbor.
+  max_open_conns: 900
+{% endif %}
+
+{% if data_volume is defined %}
+# The default data volume
+data_volume: {{ data_volume }}
+{% else %}
+# The default data volume
+data_volume: /data
+{% endif %}
+
+# Harbor Storage settings by default is using /data dir on local filesystem
+# Uncomment storage_service setting If you want to using external storage
+{% if storage_service is defined %}
+storage_service:
+  {% for key, value in storage_service.items() %}
+    {% if key == 'ca_bundle' %}
+#   # ca_bundle is the path to the custom root ca certificate, which will be injected into the truststore
+#   # of registry's and chart repository's containers.  This is usually needed when the user hosts an internal storage with a self-signed certificate.
+  ca_bundle: {{ value if value is not none else '' }}
+    {% elif key == 'redirect' %}
+#   # set disable to true when you want to disable registry redirect
+  redirect:
+      {% if storage_service.redirect.disabled is defined %}
+    disable: {{ storage_service.redirect.disabled  | lower}}
+      {% else %}
+    disable: {{ storage_service.redirect.disable | lower}}
+      {% endif %}
+    {% else %}
+#   # storage backend, default is filesystem, options include filesystem, azure, gcs, s3, swift and oss
+#   # for more info about this configuration please refer https://docs.docker.com/registry/configuration/
+  {{ key }}:
+      {% for k, v in value.items() %}
+    {{ k }}: {{ v if v is not none else '' }}
+      {% endfor %}
+    {% endif %}
+  {% endfor %}
+{% else %}
+# Harbor Storage settings by default is using /data dir on local filesystem
+# Uncomment storage_service setting If you want to using external storage
+# storage_service:
+#   # ca_bundle is the path to the custom root ca certificate, which will be injected into the truststore
+#   # of registry's and chart repository's containers.  This is usually needed when the user hosts an internal storage with a self-signed certificate.
+#   ca_bundle:
+
+#   # storage backend, default is filesystem, options include filesystem, azure, gcs, s3, swift and oss
+#   # for more info about this configuration please refer https://docs.docker.com/registry/configuration/
+#   filesystem:
+#     maxthreads: 100
+#   # set disable to true when you want to disable registry redirect
+#   redirect:
+#     disable: false
+{% endif %}
+
+# Trivy configuration
+#
+# Trivy DB contains vulnerability information from NVD, Red Hat, and many other upstream vulnerability databases.
+# It is downloaded by Trivy from the GitHub release page https://github.com/aquasecurity/trivy-db/releases and cached
+# in the local file system. In addition, the database contains the update timestamp so Trivy can detect whether it
+# should download a newer version from the Internet or use the cached one. Currently, the database is updated every
+# 12 hours and published as a new release to GitHub.
+{% if trivy is defined %}
+trivy:
+  # ignoreUnfixed The flag to display only fixed vulnerabilities
+  {% if trivy.ignore_unfixed is defined %}
+  ignore_unfixed: {{ trivy.ignore_unfixed | lower }}
+  {% else %}
+  ignore_unfixed: false
+  {% endif %}
+  # timeout The duration to wait for scan completion
+  {% if trivy.timeout is defined %}
+  timeout: {{ trivy.timeout }}
+  {% else %}
+  timeout: 5m0s
+  {% endif %}
+  # skipUpdate The flag to enable or disable Trivy DB downloads from GitHub
+  #
+  # You might want to enable this flag in test or CI/CD environments to avoid GitHub rate limiting issues.
+  # If the flag is enabled you have to download the `trivy-offline.tar.gz` archive manually, extract `trivy.db` and
+  # `metadata.json` files and mount them in the `/home/scanner/.cache/trivy/db` path.
+  {% if trivy.skip_update is defined %}
+  skip_update: {{ trivy.skip_update | lower }}
+  {% else %}
+  skip_update: false
+  {% endif %}
+  #
+  {% if trivy.offline_scan is defined %}
+  offline_scan: {{ trivy.offline_scan | lower }}
+  {% else %}
+  offline_scan: false
+  {% endif %}
+  #
+  # Comma-separated list of what security issues to detect. Possible values are `vuln`, `config` and `secret`. Defaults to `vuln`.
+  {% if trivy.security_check is defined %}
+  security_check: {{ trivy.security_check }}
+  {% else %}
+  security_check: vuln
+  {% endif %}
+  #
+  # insecure The flag to skip verifying registry certificate
+  {% if trivy.insecure is defined %}
+  insecure: {{ trivy.insecure | lower }}
+  {% else %}
+  insecure: false
+  {% endif %}
+  # github_token The GitHub access token to download Trivy DB
+  #
+  # Anonymous downloads from GitHub are subject to the limit of 60 requests per hour. Normally such rate limit is enough
+  # for production operations. If, for any reason, it's not enough, you could increase the rate limit to 5000
+  # requests per hour by specifying the GitHub access token. For more details on GitHub rate limiting please consult
+  # https://developer.github.com/v3/#rate-limiting
+  #
+  # You can create a GitHub token by following the instructions in
+  # https://help.github.com/en/github/authenticating-to-github/creating-a-personal-access-token-for-the-command-line
+  #
+  {% if trivy.github_token is defined %}
+  github_token: {{ trivy.github_token }}
+  {% else %}
+  # github_token: xxx
+  {% endif %}
+{% else %}
+# trivy:
+#   # ignoreUnfixed The flag to display only fixed vulnerabilities
+#   ignore_unfixed: false
+#   # skipUpdate The flag to enable or disable Trivy DB downloads from GitHub
+#   #
+#   # You might want to enable this flag in test or CI/CD environments to avoid GitHub rate limiting issues.
+#   # If the flag is enabled you have to download the `trivy-offline.tar.gz` archive manually, extract `trivy.db` and
+#   # `metadata.json` files and mount them in the `/home/scanner/.cache/trivy/db` path.
+#   skip_update: false
+#   #
+#   #The offline_scan option prevents Trivy from sending API requests to identify dependencies.
+#   # Scanning JAR files and pom.xml may require Internet access for better detection, but this option tries to avoid it.
+#   # For example, the offline mode will not try to resolve transitive dependencies in pom.xml when the dependency doesn't
+#   # exist in the local repositories. It means a number of detected vulnerabilities might be fewer in offline mode.
+#   # It would work if all the dependencies are in local.
+#   # This option doesn’t affect DB download. You need to specify "skip-update" as well as "offline-scan" in an air-gapped environment.
+#   offline_scan: false
+#   #
+#   # insecure The flag to skip verifying registry certificate
+#   insecure: false
+#   # github_token The GitHub access token to download Trivy DB
+#   #
+#   # Anonymous downloads from GitHub are subject to the limit of 60 requests per hour. Normally such rate limit is enough
+#   # for production operations. If, for any reason, it's not enough, you could increase the rate limit to 5000
+#   # requests per hour by specifying the GitHub access token. For more details on GitHub rate limiting please consult
+#   # https://developer.github.com/v3/#rate-limiting
+#   #
+#   # You can create a GitHub token by following the instructions in
+#   # https://help.github.com/en/github/authenticating-to-github/creating-a-personal-access-token-for-the-command-line
+#   #
+#   # github_token: xxx
+{% endif %}
+
+jobservice:
+  # Maximum number of job workers in job service
+{% if jobservice is defined %}
+  max_job_workers: {{ jobservice.max_job_workers }}
+  # The jobLoggers backend name, only support "STD_OUTPUT", "FILE" and/or "DB"
+  {% if jobservice.job_loggers is defined %}
+  job_loggers:
+    {% for job_logger in jobservice.job_loggers %}
+    - {{job_logger}}
+    {% endfor %}
+  {% else %}
+  job_loggers:
+    - STD_OUTPUT
+    - FILE
+    # - DB
+  {% endif %}
+  # The jobLogger sweeper duration (ignored if `jobLogger` is `stdout`)
+  {% if jobservice.logger_sweeper_duration is defined %}
+  logger_sweeper_duration: {{ jobservice.logger_sweeper_duration }}
+  {% else %}
+  logger_sweeper_duration: 1
+  {% endif %}
+{% else %}
+  max_job_workers: 10
+  # The jobLoggers backend name, only support "STD_OUTPUT", "FILE" and/or "DB"
+  job_loggers:
+    - STD_OUTPUT
+    - FILE
+    # - DB
+  # The jobLogger sweeper duration (ignored if `jobLogger` is `stdout`)
+  logger_sweeper_duration: 1
+{% endif %}
+
+notification:
+  # Maximum retry count for webhook job
+{% if notification is defined %}
+  webhook_job_max_retry: {{ notification.webhook_job_max_retry}}
+  # HTTP client timeout for webhook job
+  {% if notification.webhook_job_http_client_timeout is defined %}
+  webhook_job_http_client_timeout: {{ notification.webhook_job_http_client_timeout }}
+  {% else %}
+  webhook_job_http_client_timeout: 3 #seconds
+  {% endif %}
+{% else %}
+  webhook_job_max_retry: 3
+  # HTTP client timeout for webhook job
+  webhook_job_http_client_timeout: 3 #seconds
+{% endif %}
+
+# Log configurations
+log:
+  # options are debug, info, warning, error, fatal
+{% if log is defined %}
+  level: {{ log.level }}
+  # configs for logs in local storage
+  local:
+    # Log files are rotated log_rotate_count times before being removed. If count is 0, old versions are removed rather than rotated.
+    rotate_count: {{ log.local.rotate_count }}
+    # Log files are rotated only if they grow bigger than log_rotate_size bytes. If size is followed by k, the size is assumed to be in kilobytes.
+    # If the M is used, the size is in megabytes, and if G is used, the size is in gigabytes. So size 100, size 100k, size 100M and size 100G
+    # are all valid.
+    rotate_size: {{ log.local.rotate_size }}
+    # The directory on your host that store log
+    location: {{ log.local.location }}
+  {% if log.external_endpoint is defined %}
+  external_endpoint:
+    # protocol used to transmit log to external endpoint, options is tcp or udp
+    protocol: {{ log.external_endpoint.protocol }}
+    # The host of external endpoint
+    host: {{ log.external_endpoint.host }}
+    # Port of external endpoint
+    port: {{ log.external_endpoint.port }}
+  {% else %}
+    # Uncomment following lines to enable external syslog endpoint.
+    # external_endpoint:
+    #   # protocol used to transmit log to external endpoint, options is tcp or udp
+    #   protocol: tcp
+    #   # The host of external endpoint
+    #   host: localhost
+    #   # Port of external endpoint
+    #   port: 5140
+  {% endif %}
+{% else %}
+  level: info
+  # configs for logs in local storage
+  local:
+    # Log files are rotated log_rotate_count times before being removed. If count is 0, old versions are removed rather than rotated.
+    rotate_count: 50
+    # Log files are rotated only if they grow bigger than log_rotate_size bytes. If size is followed by k, the size is assumed to be in kilobytes.
+    # If the M is used, the size is in megabytes, and if G is used, the size is in gigabytes. So size 100, size 100k, size 100M and size 100G
+    # are all valid.
+    rotate_size: 200M
+    # The directory on your host that store log
+    location: /var/log/harbor
+
+  # Uncomment following lines to enable external syslog endpoint.
+  # external_endpoint:
+  #   # protocol used to transmit log to external endpoint, options is tcp or udp
+  #   protocol: tcp
+  #   # The host of external endpoint
+  #   host: localhost
+  #   # Port of external endpoint
+  #   port: 5140
+{% endif %}
+
+
+#This attribute is for migrator to detect the version of the .cfg file, DO NOT MODIFY!
+_version: 2.10.0
+{% if external_database is defined %}
+# Uncomment external_database if using external database.
+external_database:
+  harbor:
+    host: {{ external_database.harbor.host }}
+    port: {{ external_database.harbor.port }}
+    db_name: {{ external_database.harbor.db_name }}
+    username: {{ external_database.harbor.username }}
+    password: {{ external_database.harbor.password }}
+    ssl_mode: {{ external_database.harbor.ssl_mode }}
+    max_idle_conns: {{ external_database.harbor.max_idle_conns}}
+    max_open_conns: {{ external_database.harbor.max_open_conns}}
+{% else %}
+# Uncomment external_database if using external database.
+# external_database:
+#   harbor:
+#     host: harbor_db_host
+#     port: harbor_db_port
+#     db_name: harbor_db_name
+#     username: harbor_db_username
+#     password: harbor_db_password
+#     ssl_mode: disable
+#     max_idle_conns: 2
+#     max_open_conns: 0
+{% endif %}
+
+{% if redis is defined %}
+redis:
+#   # db_index 0 is for core, it's unchangeable
+{% if redis.registry_db_index is defined %}
+  registry_db_index: {{ redis.registry_db_index }}
+{% else %}
+#   # registry_db_index: 1
+{% endif %}
+{% if redis.jobservice_db_index is defined %}
+  jobservice_db_index: {{ redis.jobservice_db_index }}
+{% else %}
+#   # jobservice_db_index: 2
+{% endif %}
+{% if redis.trivy_db_index is defined %}
+  trivy_db_index: {{ redis.trivy_db_index }}
+{% else %}
+#   # trivy_db_index: 5
+{% endif %}
+{% if redis.harbor_db_index is defined %}
+  harbor_db_index: {{ redis.harbor_db_index }}
+{% else %}
+#   # it's optional, the db for harbor business misc, by default is 0, uncomment it if you want to change it.
+#   # harbor_db_index: 6
+{% endif %}
+{% if redis.cache_layer_db_index is defined %}
+  cache_layer_db_index: {{ redis.cache_layer_db_index }}
+{% else %}
+#   # it's optional, the db for harbor cache layer, by default is 0, uncomment it if you want to change it.
+#   # cache_layer_db_index: 7
+{% endif %}
+{% else %}
+# Uncomment redis if need to customize redis db
+# redis:
+#   # db_index 0 is for core, it's unchangeable
+#   # registry_db_index: 1
+#   # jobservice_db_index: 2
+#   # trivy_db_index: 5
+#   # it's optional, the db for harbor business misc, by default is 0, uncomment it if you want to change it.
+#   # harbor_db_index: 6
+#   # it's optional, the db for harbor cache layer, by default is 0, uncomment it if you want to change it.
+#   # cache_layer_db_index: 7
+{% endif %}
+
+{% if external_redis is defined %}
+external_redis:
+  # support redis, redis+sentinel
+  # host for redis: <host_redis>:<port_redis>
+  # host for redis+sentinel:
+  #  <host_sentinel1>:<port_sentinel1>,<host_sentinel2>:<port_sentinel2>,<host_sentinel3>:<port_sentinel3>
+  host: {{ external_redis.host }}
+  password: {{ external_redis.password }}
+  # Redis AUTH command was extended in Redis 6, it is possible to use it in the two-arguments AUTH <username> <password> form.
+  {% if external_redis.username is defined %}
+  username: {{ external_redis.username }}
+  {% else %}
+  # username:
+  {% endif %}
+  # sentinel_master_set must be set to support redis+sentinel
+  #sentinel_master_set:
+  # db_index 0 is for core, it's unchangeable
+  registry_db_index: {{ external_redis.registry_db_index }}
+  jobservice_db_index: {{ external_redis.jobservice_db_index }}
+  trivy_db_index: 5
+  idle_timeout_seconds: 30
+  {% if external_redis.harbor_db_index is defined %}
+  harbor_db_index: {{ external_redis.harbor_db_index }}
+  {% else %}
+# # it's optional, the db for harbor business misc, by default is 0, uncomment it if you want to change it.
+# # harbor_db_index: 6
+  {% endif %}
+  {% if external_redis.cache_layer_db_index is defined %}
+  cache_layer_db_index: {{ external_redis.cache_layer_db_index }}
+  {% else %}
+# # it's optional, the db for harbor cache layer, by default is 0, uncomment it if you want to change it.
+# # cache_layer_db_index: 7
+  {% endif %}
+{% else %}
+# Uncomment external_redis if using external Redis server
+# external_redis:
+#   # support redis, redis+sentinel
+#   # host for redis: <host_redis>:<port_redis>
+#   # host for redis+sentinel:
+#   #  <host_sentinel1>:<port_sentinel1>,<host_sentinel2>:<port_sentinel2>,<host_sentinel3>:<port_sentinel3>
+#   host: redis:6379
+#   password:
+#   # Redis AUTH command was extended in Redis 6, it is possible to use it in the two-arguments AUTH <username> <password> form.
+#   # username:
+#   # sentinel_master_set must be set to support redis+sentinel
+#   #sentinel_master_set:
+#   # db_index 0 is for core, it's unchangeable
+#   registry_db_index: 1
+#   jobservice_db_index: 2
+#   trivy_db_index: 5
+#   idle_timeout_seconds: 30
+#   # it's optional, the db for harbor business misc, by default is 0, uncomment it if you want to change it.
+#   # harbor_db_index: 6
+#   # it's optional, the db for harbor cache layer, by default is 0, uncomment it if you want to change it.
+#   # cache_layer_db_index: 7
+{% endif %}
+
+{% if uaa is defined %}
+# Uncomment uaa for trusting the certificate of uaa instance that is hosted via self-signed cert.
+uaa:
+  ca_file: {{ uaa.ca_file }}
+{% else %}
+# Uncomment uaa for trusting the certificate of uaa instance that is hosted via self-signed cert.
+# uaa:
+#   ca_file: /path/to/ca
+{% endif %}
+
+
+# Global proxy
+# Config http proxy for components, e.g. http://my.proxy.com:3128
+# Components doesn't need to connect to each others via http proxy.
+# Remove component from `components` array if want disable proxy
+# for it. If you want use proxy for replication, MUST enable proxy
+# for core and jobservice, and set `http_proxy` and `https_proxy`.
+# Add domain to the `no_proxy` field, when you want disable proxy
+# for some special registry.
+{% if proxy is defined %}
+proxy:
+  http_proxy: {{ proxy.http_proxy or ''}}
+  https_proxy: {{ proxy.https_proxy or ''}}
+  no_proxy: {{ proxy.no_proxy or ''}}
+  {% if proxy.components is defined %}
+  components:
+    {% for component in proxy.components %}
+      {% if component != 'clair' %}
+    - {{component}}
+      {% endif %}
+    {% endfor %}
+  {% endif %}
+{% else %}
+proxy:
+  http_proxy:
+  https_proxy:
+  no_proxy:
+  components:
+    - core
+    - jobservice
+    - trivy
+{% endif %}
+
+{% if metric is defined %}
+metric:
+  enabled: {{ metric.enabled }}
+  port: {{ metric.port }}
+  path: {{ metric.path }}
+{% else %}
+# metric:
+#   enabled: false
+#   port: 9090
+#   path: /metric
+{% endif %}
+
+# Trace related config
+# only can enable one trace provider(jaeger or otel) at the same time,
+# and when using jaeger as provider, can only enable it with agent mode or collector mode.
+# if using jaeger collector mode, uncomment endpoint and uncomment username, password if needed
+# if using jaeger agent mode, uncomment agent_host and agent_port
+{% if trace is defined %}
+trace:
+  enabled: {{ trace.enabled | lower}}
+  sample_rate: {{ trace.sample_rate }}
+  # # namespace used to differentiate different harbor services
+  {% if trace.namespace is defined %}
+  namespace: {{ trace.namespace }}
+  {% else %}
+  # namespace:
+  {% endif %}
+   # # attributes is a key value dict contains user defined attributes used to initialize trace provider
+  {% if trace.attributes is defined%}
+  attributes:
+    {% for name, value in trace.attributes.items() %}
+    {{name}}: {{value}}
+    {% endfor %}
+  {% else %}
+  # attributes:
+  #   application: harbor
+  {% endif %}
+  {% if trace.jaeger is defined%}
+  jaeger:
+    endpoint: {{trace.jaeger.endpoint or '' }}
+    username: {{trace.jaeger.username or ''}}
+    password: {{trace.jaeger.password or ''}}
+    agent_host: {{trace.jaeger.agent_host or ''}}
+    agent_port: {{trace.jaeger.agent_port or ''}}
+  {% else %}
+  # jaeger:
+  #   endpoint:
+  #   username:
+  #   password:
+  #   agent_host:
+  #   agent_port:
+  {% endif %}
+  {% if trace.otel is defined %}
+  otel:
+    endpoint: {{trace.otel.endpoint or '' }}
+    url_path: {{trace.otel.url_path or '' }}
+    compression: {{trace.otel.compression | lower }}
+    insecure: {{trace.otel.insecure | lower }}
+    timeout: {{trace.otel.timeout or '' }}
+  {% else %}
+  # otel:
+  #   endpoint: hostname:4318
+  #   url_path: /v1/traces
+  #   compression: false
+  #   insecure: true
+  #   # timeout is in seconds
+  #   timeout: 10
+  {% endif %}
+{% else %}
+# trace:
+#   enabled: true
+#   # set sample_rate to 1 if you wanna sampling 100% of trace data; set 0.5 if you wanna sampling 50% of trace data, and so forth
+#   sample_rate: 1
+#   # # namespace used to differentiate different harbor services
+#   # namespace:
+#   # # attributes is a key value dict contains user defined attributes used to initialize trace provider
+#   # attributes:
+#   #   application: harbor
+#   # jaeger:
+#   #   endpoint: http://hostname:14268/api/traces
+#   #   username:
+#   #   password:
+#   #   agent_host: hostname
+#   #   agent_port: 6832
+#   # otel:
+#   #   endpoint: hostname:4318
+#   #   url_path: /v1/traces
+#   #   compression: false
+#   #   insecure: true
+#   #   # timeout is in seconds
+#   #   timeout: 10
+{% endif %}
+
+# enable purge _upload directories
+{% if upload_purging is defined %}
+upload_purging:
+  enabled: {{ upload_purging.enabled | lower}}
+  age: {{ upload_purging.age }}
+  interval: {{ upload_purging.interval }}
+  dryrun: {{ upload_purging.dryrun | lower}}
+{% else %}
+upload_purging:
+  enabled: true
+  # remove files in _upload directories which exist for a period of time, default is one week.
+  age: 168h
+  # the interval of the purge operations
+  interval: 24h
+  dryrun: false
+{% endif %}
+
+# Cache related config
+{% if cache is defined %}
+cache:
+  enabled: {{ cache.enabled | lower}}
+  expire_hours: {{ cache.expire_hours }}
+{% else %}
+cache:
+  enabled: false
+  expire_hours: 24
+{% endif %}
+
+# Harbor core configurations
+{% if core is defined %}
+core:
+  # The provider for updating project quota(usage), there are 2 options, redis or db,
+  # by default is implemented by db but you can switch the updation via redis which
+  # can improve the performance of high concurrent pushing to the same project,
+  # and reduce the database connections spike and occupies.
+  # By redis will bring up some delay for quota usage updation for display, so only
+  # suggest switching the provider to redis if you ran into the db connections spike around
+  # the scenario of high concurrent pushing to the same project, no improvement for other scenarios.
+  quota_update_provider: {{ core.quota_update_provider }}
+{% else %}
+# core:
+#   # The provider for updating project quota(usage), there are 2 options, redis or db,
+#   # by default is implemented by db but you can switch the updation via redis which
+#   # can improve the performance of high concurrent pushing to the same project,
+#   # and reduce the database connections spike and occupies.
+#   # By redis will bring up some delay for quota usage updation for display, so only
+#   # suggest switching the provider to redis if you ran into the db connections spike around
+#   # the scenario of high concurrent pushing to the same project, no improvement for other scenarios.
+#   quota_update_provider: redis # Or db
+{% endif %}

From d967ac0fb659ef84f500c94cd67c803da6d179c8 Mon Sep 17 00:00:00 2001
From: Shijun Sun <30999793+AllForNothing@users.noreply.github.com>
Date: Wed, 22 Nov 2023 14:37:31 +0800
Subject: [PATCH 3/8] Update the permission scope (#19603)

1. Update the permission scope
2. Sort the resources and actions by unicode

Signed-off-by: AllForNothing <sshijun@vmware.com>
---
 src/common/rbac/const.go                      | 29 +++++++++----------
 .../robot-permissions-panel.component.ts      |  2 ++
 tests/apitests/python/test_robot_account.py   |  2 +-
 3 files changed, 16 insertions(+), 17 deletions(-)

diff --git a/src/common/rbac/const.go b/src/common/rbac/const.go
index 1ae653bccb8..3c64a853366 100644
--- a/src/common/rbac/const.go
+++ b/src/common/rbac/const.go
@@ -102,9 +102,7 @@ var (
 
 			{Resource: ResourceReplication, Action: ActionRead},
 			{Resource: ResourceReplication, Action: ActionCreate},
-			{Resource: ResourceReplication, Action: ActionDelete},
 			{Resource: ResourceReplication, Action: ActionList},
-			{Resource: ResourceReplication, Action: ActionUpdate},
 
 			{Resource: ResourceReplicationAdapter, Action: ActionList},
 
@@ -145,7 +143,6 @@ var (
 			{Resource: ResourceLabel, Action: ActionRead},
 			{Resource: ResourceLabel, Action: ActionCreate},
 			{Resource: ResourceLabel, Action: ActionDelete},
-			{Resource: ResourceLabel, Action: ActionList},
 			{Resource: ResourceLabel, Action: ActionUpdate},
 
 			{Resource: ResourceSecurityHub, Action: ActionRead},
@@ -154,11 +151,7 @@ var (
 			{Resource: ResourceCatalog, Action: ActionRead},
 		},
 		"Project": {
-			{Resource: ResourceLabel, Action: ActionRead},
-			{Resource: ResourceLabel, Action: ActionCreate},
-			{Resource: ResourceLabel, Action: ActionDelete},
-			{Resource: ResourceLabel, Action: ActionList},
-			{Resource: ResourceLabel, Action: ActionUpdate},
+			{Resource: ResourceLog, Action: ActionList},
 
 			{Resource: ResourceProject, Action: ActionRead},
 			{Resource: ResourceProject, Action: ActionDelete},
@@ -192,7 +185,7 @@ var (
 
 			{Resource: ResourceAccessory, Action: ActionList},
 
-			{Resource: ResourceArtifactAddition, Action: ActionCreate},
+			{Resource: ResourceArtifactAddition, Action: ActionRead},
 
 			{Resource: ResourceArtifactLabel, Action: ActionCreate},
 			{Resource: ResourceArtifactLabel, Action: ActionDelete},
@@ -211,19 +204,23 @@ var (
 			{Resource: ResourceImmutableTag, Action: ActionList},
 			{Resource: ResourceImmutableTag, Action: ActionUpdate},
 
+			{Resource: ResourceNotificationPolicy, Action: ActionRead},
+			{Resource: ResourceNotificationPolicy, Action: ActionCreate},
+			{Resource: ResourceNotificationPolicy, Action: ActionDelete},
+			{Resource: ResourceNotificationPolicy, Action: ActionList},
+			{Resource: ResourceNotificationPolicy, Action: ActionUpdate},
+
 			{Resource: ResourceTagRetention, Action: ActionRead},
 			{Resource: ResourceTagRetention, Action: ActionCreate},
 			{Resource: ResourceTagRetention, Action: ActionDelete},
 			{Resource: ResourceTagRetention, Action: ActionList},
 			{Resource: ResourceTagRetention, Action: ActionUpdate},
 
-			{Resource: ResourceLog, Action: ActionList},
-
-			{Resource: ResourceNotificationPolicy, Action: ActionRead},
-			{Resource: ResourceNotificationPolicy, Action: ActionCreate},
-			{Resource: ResourceNotificationPolicy, Action: ActionDelete},
-			{Resource: ResourceNotificationPolicy, Action: ActionList},
-			{Resource: ResourceNotificationPolicy, Action: ActionUpdate},
+			{Resource: ResourceLabel, Action: ActionRead},
+			{Resource: ResourceLabel, Action: ActionCreate},
+			{Resource: ResourceLabel, Action: ActionDelete},
+			{Resource: ResourceLabel, Action: ActionList},
+			{Resource: ResourceLabel, Action: ActionUpdate},
 		},
 	}
 )
diff --git a/src/portal/src/app/shared/components/robot-permissions-panel/robot-permissions-panel.component.ts b/src/portal/src/app/shared/components/robot-permissions-panel/robot-permissions-panel.component.ts
index 59ef454c2df..e79c9e4dd52 100644
--- a/src/portal/src/app/shared/components/robot-permissions-panel/robot-permissions-panel.component.ts
+++ b/src/portal/src/app/shared/components/robot-permissions-panel/robot-permissions-panel.component.ts
@@ -88,6 +88,8 @@ export class RobotPermissionsPanelComponent
                 this.candidateActions.push(item?.action);
             }
         });
+        this.candidateActions.sort();
+        this.candidateResources.sort();
     }
 
     isCandidate(resource: string, action: string): boolean {
diff --git a/tests/apitests/python/test_robot_account.py b/tests/apitests/python/test_robot_account.py
index ddc6cbc6143..6d7db141eb5 100644
--- a/tests/apitests/python/test_robot_account.py
+++ b/tests/apitests/python/test_robot_account.py
@@ -162,7 +162,7 @@ def verify_repository_unpushable(self, project_access_list, system_ra_client, ex
                 expected_error_message = expected_error_message
             )
 
-    def Atest_02_SystemlevelRobotAccount(self):
+    def test_02_SystemlevelRobotAccount(self):
         """
         Test case:
             Robot Account

From 13ae233729669b79968f4c075a23583abea413d4 Mon Sep 17 00:00:00 2001
From: Shijun Sun <30999793+AllForNothing@users.noreply.github.com>
Date: Wed, 22 Nov 2023 16:13:04 +0800
Subject: [PATCH 4/8] Add test cases for the project level permissions (#19593)

Add API test cases for the project-level permissions of a robot account
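
Each case in the new test is declared as a Permission object: an endpoint, an HTTP
method, the status code the robot is expected to receive, plus an optional payload and
an id picked up from a previous response. A hedged usage sketch (it assumes the
Permission class and the HARBOR_BASE_URL/PROJECT_ID environment variables defined in
tests/apitests/python/test_project_permission.py below; the 403 expectation is an
illustrative "permission denied" case, not a line from the file):

    import os

    harbor_base_url = os.environ.get("HARBOR_BASE_URL")
    project_id = os.environ.get("PROJECT_ID")

    # A robot granted label permissions should be able to list project labels ...
    list_label = Permission("{}/labels?scope=p&project_id={}".format(harbor_base_url, project_id),
                            "GET", 200, res_id_field="id")
    # ... while a robot without project:delete should be turned away with a 403.
    delete_project_denied = Permission("{}/projects/{}".format(harbor_base_url, project_id),
                                       "DELETE", 403)

    list_label.call()
    delete_project_denied.call()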

Signed-off-by: AllForNothing <sshijun@vmware.com>
---
 .../python/test_project_permission.py         | 365 ++++++++++++++++++
 1 file changed, 365 insertions(+)
 create mode 100644 tests/apitests/python/test_project_permission.py

diff --git a/tests/apitests/python/test_project_permission.py b/tests/apitests/python/test_project_permission.py
new file mode 100644
index 00000000000..0c0ee206b39
--- /dev/null
+++ b/tests/apitests/python/test_project_permission.py
@@ -0,0 +1,365 @@
+import copy
+import json
+import time
+import requests
+import urllib3
+import os
+
+admin_name = os.environ.get("ADMIN_NAME")
+admin_password = os.environ.get("ADMIN_PASSWORD")
+user_name = os.environ.get("USER_NAME")
+password = os.environ.get("PASSWORD")
+harbor_base_url = os.environ.get("HARBOR_BASE_URL")
+resource = os.environ.get("RESOURCE")
+project_id = os.environ.get("PROJECT_ID")
+project_name = os.environ.get("PROJECT_NAME")
+# the source artifact should belong to the provided project, e.g. "nginx"
+source_artifact_name = os.environ.get("SOURCE_ARTIFACT_NAME")
+# the source artifact tag should belong to the provided project, e.g. "latest"
+source_artifact_tag = os.environ.get("SOURCE_ARTIFACT_TAG")
+id_or_name = None
+
+urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+
+
+class Permission:
+
+    def __init__(self, url, method, expect_status_code, payload=None, need_id_or_name=False, res_id_field=None,
+                 payload_id_field=None):
+        self.url = url
+        self.method = method
+        self.expect_status_code = expect_status_code
+        self.payload = payload
+        self.res_id_field = res_id_field
+        self.need_id_or_name = need_id_or_name
+        self.payload_id_field = payload_id_field if payload_id_field else res_id_field
+
+    def call(self):
+        global id_or_name
+        url = self.url
+        if self.need_id_or_name:
+            url = self.url.format(id_or_name)
+        response = requests.request(self.method, url, data=json.dumps(self.payload), verify=False,
+                                    auth=(user_name, password), headers={
+                "Content-Type": "application/json"
+            })
+        print("response: {}".format(response.text))
+        assert response.status_code == self.expect_status_code, ("Failed to call the {} {}, expected status code is {"
+                                                                 "}, but got {}, error msg is {}").format(
+            self.method, self.url, self.expect_status_code, response.status_code, response.text)
+        if self.res_id_field and self.payload_id_field and len(json.loads(response.text)) > 0:
+            id_or_name = json.loads(response.text)[0][self.res_id_field]
+
+
+# Project permissions:
+# 1. Resource: label, actions: ['read', 'create', 'update', 'delete', 'list']
+label_payload = {
+    "color": "#FFFFFF",
+    "description": "Just for testing",
+    "name": "label-name-{}".format(int(round(time.time() * 1000))),
+    "project_id": int(project_id),
+    "scope": "p",
+    "id": None
+}
+create_label = Permission("{}/labels".format(harbor_base_url), "POST", 201, label_payload)
+list_label = Permission("{}/labels?scope=p&project_id={}".format(harbor_base_url, project_id), "GET", 200,
+                        label_payload, False, "id", "id")
+read_label = Permission("{}/labels/{}".format(harbor_base_url, "{}"), "GET", 200, label_payload, True)
+label_payload_for_update = copy.deepcopy(label_payload)
+label_payload_for_update["description"] = "For update"
+update_label = Permission("{}/labels/{}".format(harbor_base_url, "{}"), "PUT", 200, label_payload_for_update, True)
+delete_label = Permission("{}/labels/{}".format(harbor_base_url, "{}"), "DELETE", 200, label_payload, True)
+
+# 2. Resource: project, actions: ['read', 'update', 'delete']
+project_payload_for_update = {"project_name": "test", "metadata": {"public": "false"}, "storage_limit": -1}
+read_project = Permission("{}/projects/{}".format(harbor_base_url, project_id), "GET", 200, project_payload_for_update,
+                          False)
+update_project = Permission("{}/projects/{}".format(harbor_base_url, project_id), "PUT", 200,
+                            project_payload_for_update, False)
+delete_project = Permission("{}/projects/{}".format(harbor_base_url, project_id), "DELETE", 200,
+                            project_payload_for_update, False)
+deletable_project = Permission("{}/projects/{}/_deletable".format(harbor_base_url, project_id), "GET", 200,
+                               project_payload_for_update, False)
+
+# 3. Resource: metadata   actions: ['read', 'list', 'create', 'update', 'delete'],
+metadata_payload = {
+    "auto_scan": "true"
+}
+create_metadata = Permission("{}/projects/{}/metadatas".format(harbor_base_url, project_id), "POST", 200,
+                             metadata_payload, False)
+list_metadata = Permission("{}/projects/{}/metadatas".format(harbor_base_url, project_id), "GET", 200, metadata_payload,
+                           False)
+read_metadata = Permission("{}/projects/{}/metadatas/auto_scan".format(harbor_base_url, project_id), "GET", 200,
+                           metadata_payload, False)
+metadata_payload_for_update = {
+    "auto_scan": "false"
+}
+update_metadata = Permission("{}/projects/{}/metadatas/auto_scan".format(harbor_base_url, project_id), "PUT", 200,
+                             metadata_payload_for_update, False)
+delete_metadata = Permission("{}/projects/{}/metadatas/auto_scan".format(harbor_base_url, project_id), "DELETE", 200,
+                             metadata_payload, False)
+
+# 4. Resource: repository  actions: ['read', 'list', 'update', 'delete', 'pull', 'push']
+# note: pull and push are exercised by the docker CLI; no API endpoint needs them
+list_repo = Permission("{}/projects/{}/repositories".format(harbor_base_url, project_name), "GET", 200)
+read_repo = Permission("{}/projects/{}/repositories/does_not_exist".format(harbor_base_url, project_name), "GET", 404)
+repo_payload_for_update = {
+}
+update_repo = Permission("{}/projects/{}/repositories/does_not_exist".format(harbor_base_url, project_name), "PUT", 404,
+                         repo_payload_for_update)
+delete_repo = Permission("{}/projects/{}/repositories/does_not_exist".format(harbor_base_url, project_name), "DELETE",
+                         404)
+
+# 5. Resource artifact   actions: ['read', 'list', 'create', 'delete'],
+list_artifact = Permission("{}/projects/{}/repositories/does_not_exist/artifacts".format(harbor_base_url, project_name),
+                           "GET", 200)
+read_artifact = Permission(
+    "{}/projects/{}/repositories/does_not_exist/artifacts/reference_does_not_exist".format(harbor_base_url,
+                                                                                           project_name), "GET", 404)
+copy_artifact = Permission(
+    "{}/projects/{}/repositories/target_repo/artifacts?from={}/{}:{}".format(harbor_base_url, project_name,
+                                                                             project_name, source_artifact_name,
+                                                                             source_artifact_tag), "POST", 201)
+delete_artifact = Permission(
+    "{}/projects/{}/repositories/target_repo/artifacts/{}".format(harbor_base_url, project_name, source_artifact_tag),
+    "DELETE", 200)
+
+# 6. Resource scan      actions: ['read', 'create', 'stop']
+create_scan = Permission(
+    "{}/projects/{}/repositories/{}/artifacts/{}/scan".format(harbor_base_url, project_name, source_artifact_name,
+                                                              source_artifact_tag), "POST", 202)
+stop_scan = Permission(
+    "{}/projects/{}/repositories/{}/artifacts/{}/scan/stop".format(harbor_base_url, project_name, source_artifact_name,
+                                                                   source_artifact_tag), "POST", 202)
+read_scan = Permission(
+    "{}/projects/{}/repositories/{}/artifacts/{}/scan/0/log".format(harbor_base_url, project_name, source_artifact_name,
+                                                                    source_artifact_tag), "GET", 404)
+
+# 7. Resource tag      actions: ['list', 'create', 'delete']
+tag_payload = {
+    "name": "test-{}".format(int(round(time.time() * 1000)))
+}
+create_tag = Permission(
+    "{}/projects/{}/repositories/{}/artifacts/{}/tags".format(harbor_base_url, project_name, source_artifact_name,
+                                                              source_artifact_tag), "POST", 201, tag_payload)
+list_tag = Permission(
+    "{}/projects/{}/repositories/{}/artifacts/{}/tags".format(harbor_base_url, project_name, source_artifact_name,
+                                                              source_artifact_tag), "GET", 200)
+delete_tag = Permission(
+    "{}/projects/{}/repositories/{}/artifacts/{}/tags/tag_name_does_not_exist".format(harbor_base_url, project_name,
+                                                                                      source_artifact_name,
+                                                                                      source_artifact_tag), "DELETE",
+    404)
+
+# 8. Resource accessory  actions: ['list']
+list_accessory = Permission(
+    "{}/projects/{}/repositories/{}/artifacts/{}/accessories".format(harbor_base_url, project_name,
+                                                                     source_artifact_name, source_artifact_tag), "GET",
+    200)
+
+# 9. Resource artifact-addition    actions: ['read']
+read_artifact_addition_vul = Permission(
+    "{}/projects/{}/repositories/{}/artifacts/{}/additions/vulnerabilities".format(harbor_base_url, project_name,
+                                                                                   source_artifact_name,
+                                                                                   source_artifact_tag), "GET", 200)
+read_artifact_addition_dependencies = Permission(
+    "{}/projects/{}/repositories/{}/artifacts/{}/additions/dependencies".format(harbor_base_url, project_name,
+                                                                                source_artifact_name,
+                                                                                source_artifact_tag), "GET", 400)
+
+# 10. Resource artifact-label     actions: ['create', 'delete'],
+artifact_label_payload = copy.deepcopy(label_payload)
+artifact_label_payload["description"] = "Add label to an artifact"
+add_label_to_artifact = Permission(
+    "{}/projects/{}/repositories/{}/artifacts/{}/labels".format(harbor_base_url, project_name, source_artifact_name,
+                                                                source_artifact_tag), "POST", 404,
+    artifact_label_payload)
+delete_artifact_label = Permission(
+    "{}/projects/{}/repositories/{}/artifacts/{}/labels/0".format(harbor_base_url, project_name, source_artifact_name,
+                                                                  source_artifact_tag), "DELETE", 404,
+    artifact_label_payload)
+
+# 11. Resource scanner           actions: ['create', 'read']
+update_project_scanner = Permission("{}/projects/{}/scanner".format(harbor_base_url, project_id), "PUT", 200,
+                                    {"uuid": "faked_uuid"})
+read_project_scanner = Permission("{}/projects/{}/scanner".format(harbor_base_url, project_id), "GET", 200)
+read_project_scanner_candidates = Permission("{}/projects/{}/scanner/candidates".format(harbor_base_url, project_id),
+                                             "GET", 200)
+
+# 12. Resource preheat-policy   actions: ['read', 'list', 'create', 'update', 'delete']
+create_preheat_policy = Permission("{}/projects/{}/preheat/policies".format(harbor_base_url, project_name), "POST", 500,
+                                   {})
+list_preheat_policy = Permission("{}/projects/{}/preheat/policies".format(harbor_base_url, project_name), "GET", 200)
+read_preheat_policy = Permission(
+    "{}/projects/{}/preheat/policies/policy_name_does_not_exist".format(harbor_base_url, project_name), "GET", 404)
+update_preheat_policy = Permission(
+    "{}/projects/{}/preheat/policies/policy_name_does_not_exist".format(harbor_base_url, project_name), "PUT", 500)
+delete_preheat_policy = Permission(
+    "{}/projects/{}/preheat/policies/policy_name_does_not_exist".format(harbor_base_url, project_name), "DELETE", 404)
+
+# 13. Resource immutable-tag   actions: ['list', 'create', 'update', 'delete']
+immutable_tag_rule_payload = {
+    "disabled": False,
+    "scope_selectors": {
+        "repository": [{"kind": "doublestar", "decoration": "repoMatches",
+                        "pattern": "{}".format(int(round(time.time() * 1000)))}]},
+    "tag_selectors": [
+        {"kind": "doublestar", "decoration": "matches", "pattern": "{}".format(int(round(time.time() * 1000)))}],
+}
+create_immutable_tag_rule = Permission("{}/projects/{}/immutabletagrules".format(harbor_base_url, project_id), "POST",
+                                       201,
+                                       immutable_tag_rule_payload)
+list_immutable_tag_rule = Permission("{}/projects/{}/immutabletagrules".format(harbor_base_url, project_id), "GET", 200)
+update_immutable_tag_rule = Permission("{}/projects/{}/immutabletagrules/0".format(harbor_base_url, project_id), "PUT",
+                                       404)
+delete_immutable_tag_rule = Permission("{}/projects/{}/immutabletagrules/0".format(harbor_base_url, project_id),
+                                       "DELETE", 404)
+
+# 14. Resource tag-retention   actions: ['read', 'list', 'create', 'update', 'delete']
+tag_retention_rule_payload = {
+    "algorithm": "or",
+    "rules": [
+        {
+            "disabled": False,
+            "action": "retain",
+            "scope_selectors": {
+                "repository": [
+                    {
+                        "kind": "doublestar",
+                        "decoration": "repoMatches",
+                        "pattern": "**"
+                    }
+                ]
+            },
+            "tag_selectors": [
+                {
+                    "kind": "doublestar",
+                    "decoration": "matches",
+                    "pattern": "**",
+                    "extras": "{\"untagged\":true}"
+                }
+            ],
+            "params": {},
+            "template": "always"
+        }
+    ],
+    "trigger": {
+        "kind": "Schedule",
+        "references": {},
+        "settings": {
+            "cron": ""
+        }
+    },
+    "scope": {
+        "level": "project",
+        "ref": int(project_id)
+    }
+}
+
+response = requests.request("GET", "{}/projects/{}/metadatas/retention_id".format(harbor_base_url, project_id),
+                            data=None, verify=False,
+                            auth=(admin_name, admin_password), headers={"Content-Type": "application/json"})
+create_status_code = 400 if "retention_id" in json.loads(response.text) else 201
+create_tag_retention_rule = Permission("{}/retentions".format(harbor_base_url), "POST",
+                                       create_status_code,
+                                       tag_retention_rule_payload)
+# get retention_id
+response1 = requests.request("GET", "{}/projects/{}/metadatas/retention_id".format(harbor_base_url, project_id),
+                             data=None, verify=False,
+                             auth=(admin_name, admin_password), headers={"Content-Type": "application/json"})
+retention_id = json.loads(response1.text)["retention_id"]
+update_retention_payload = copy.deepcopy(tag_retention_rule_payload)
+update_retention_payload["rules"][0]["disabled"] = True
+read_tag_retention = Permission("{}/retentions/{}".format(harbor_base_url, retention_id), "GET", 200)
+update_tag_retention = Permission("{}/retentions/{}".format(harbor_base_url, retention_id), "PUT", 200,
+                                  update_retention_payload)
+delete_tag_retention = Permission("{}/retentions/{}".format(harbor_base_url, retention_id), "DELETE", 200)
+execute_tag_retention = Permission("{}/retentions/{}/executions".format(harbor_base_url, retention_id), "POST", 201)
+list_tag_retention_execution = Permission("{}/retentions/{}/executions".format(harbor_base_url, retention_id), "GET",
+                                          200)
+stop_tag_retention = Permission("{}/retentions/{}/executions/0".format(harbor_base_url, retention_id), "PATCH", 404,
+                                {"action": "stop"})
+list_tag_retention_tasks = Permission("{}/retentions/{}/executions/0/tasks".format(harbor_base_url, retention_id),
+                                      "GET", 404)
+read_tag_retention_tasks = Permission("{}/retentions/{}/executions/0/tasks/0".format(harbor_base_url, retention_id),
+                                      "GET", 404)
+
+# 15. Resource log   actions: ['list']
+list_log = Permission("{}/projects/{}/logs".format(harbor_base_url, project_name), "GET", 200)
+
+# 16. Resource notification-policy    actions: ['read', 'list', 'create', 'update', 'delete']
+webhook_payload = {
+    "name": "webhook-{}".format(int(round(time.time() * 1000))),
+    "description": "Just for test",
+    "project_id": int(project_id),
+    "targets": [
+        {
+            "type": "http",
+            "address": "http://test.com",
+            "skip_cert_verify": True,
+            "payload_format": "CloudEvents"
+        }
+    ],
+    "event_types": [
+        "PUSH_ARTIFACT"
+    ],
+    "enabled": True
+}
+
+create_webhook = Permission("{}/projects/{}/webhook/policies".format(harbor_base_url, project_id), "POST",
+                            201,
+                            webhook_payload)
+list_webhook = Permission("{}/projects/{}/webhook/policies".format(harbor_base_url, project_id), "GET",
+                          200)
+read_webhook = Permission("{}/projects/{}/webhook/policies/0".format(harbor_base_url, project_id), "GET",
+                          404)
+update_webhook = Permission("{}/projects/{}/webhook/policies/0".format(harbor_base_url, project_id), "PUT",
+                            404, {})
+delete_webhook = Permission("{}/projects/{}/webhook/policies/0".format(harbor_base_url, project_id), "DELETE",
+                            404)
+
+list_webhook_executions = Permission("{}/projects/{}/webhook/policies/0/executions".format(harbor_base_url, project_id),
+                                     "GET", 404)
+list_webhook_executions_tasks = Permission(
+    "{}/projects/{}/webhook/policies/0/executions/0/tasks".format(harbor_base_url, project_id), "GET", 404)
+read_webhook_executions_tasks = Permission(
+    "{}/projects/{}/webhook/policies/0/executions/0/tasks/0/log".format(harbor_base_url, project_id), "GET", 404)
+list_webhook_events = Permission("{}/projects/{}/webhook/events".format(harbor_base_url, project_id), "GET", 200)
+
+resource_permissions = {
+    "label": [create_label, list_label, read_label, update_label, delete_label],
+    "project": [read_project, update_project, deletable_project, delete_project],
+    "metadata": [create_metadata, list_metadata, read_metadata, update_metadata, delete_metadata],
+    "repository": [list_repo, read_repo, update_repo, delete_repo],
+    "artifact": [list_artifact, read_artifact, copy_artifact, delete_artifact],
+    "scan": [create_scan, stop_scan, read_scan],
+    "tag": [create_tag, list_tag, delete_tag],
+    "accessory": [list_accessory],
+    "artifact-addition": [read_artifact_addition_vul, read_artifact_addition_dependencies],
+    "artifact-label": [add_label_to_artifact, delete_artifact_label],
+    "scanner": [update_project_scanner, read_project_scanner, read_project_scanner_candidates],
+    "preheat-policy": [create_preheat_policy, list_preheat_policy, read_preheat_policy, update_preheat_policy,
+                       delete_preheat_policy],
+    "immutable-tag": [create_immutable_tag_rule, list_immutable_tag_rule, update_immutable_tag_rule,
+                      delete_immutable_tag_rule],
+    "tag-retention": [create_tag_retention_rule, read_tag_retention, update_tag_retention, execute_tag_retention,
+                      list_tag_retention_execution, stop_tag_retention, list_tag_retention_tasks,
+                      read_tag_retention_tasks, delete_tag_retention],
+    "log": [list_log],
+    "notification-policy": [create_webhook, list_webhook, read_webhook, update_webhook, delete_webhook,
+                            list_webhook_executions, list_webhook_executions_tasks, read_webhook_executions_tasks,
+                            list_webhook_events]
+}
+
+
+def main():
+    for permission in resource_permissions[resource]:
+        print("=================================================")
+        print("call: {} {}".format(permission.method, permission.url))
+        print("payload: {}".format(json.dumps(permission.payload)))
+        print("=================================================\n")
+        permission.call()
+
+
+if __name__ == "__main__":
+    main()

From 45b41d4443c1eab8f88f5046c4890a00f3a078cb Mon Sep 17 00:00:00 2001
From: Yang Jiao <72076317+YangJiao0817@users.noreply.github.com>
Date: Wed, 22 Nov 2023 17:08:27 +0800
Subject: [PATCH 5/8] Add
 API(scan,volumes,jobservice,scanner,label,securityhub,catalog) permission
 testcases (#19595)

Add scan, volumes, jobservice, scanner, label, securityhub, and catalog permission test cases
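
The catalog check targets the registry's /v2/_catalog endpoint, which sits at the registry root rather than under the API base path, so the registry root is now derived from HARBOR_BASE_URL with urlsplit at module level. A minimal sketch of that derivation (the URL below is a placeholder):

    # assumes HARBOR_BASE_URL has the form https://<host>/api/v2.0
    from urllib.parse import urlsplit

    harbor_base_url = "https://harbor.example.com/api/v2.0"      # placeholder
    parts = urlsplit(harbor_base_url)
    endpoint_URL = "{}://{}".format(parts.scheme, parts.netloc)  # https://harbor.example.com
    catalog_url = "{}/v2/_catalog".format(endpoint_URL)          # registry catalog, not under /api/v2.0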

Signed-off-by: Yang Jiao <jiaoya@vmware.com>
---
 tests/apitests/python/test_permission.py | 94 +++++++++++++++++++++++-
 1 file changed, 92 insertions(+), 2 deletions(-)

diff --git a/tests/apitests/python/test_permission.py b/tests/apitests/python/test_permission.py
index ee9f4d20edd..6f154f6f1ef 100644
--- a/tests/apitests/python/test_permission.py
+++ b/tests/apitests/python/test_permission.py
@@ -15,6 +15,7 @@
 ID_PLACEHOLDER = "(id)"
 urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
 
+
 class Permission:
 
 
@@ -38,6 +39,7 @@ def call(self):
         elif self.res_id_field and self.payload_id_field and self.id_from_header == True:
             self.payload[self.payload_id_field] = int(response.headers["Location"].split("/")[-1])
 
+
 resource_permissions = {}
 # audit logs permissions start
 list_audit_logs = Permission("{}/audit-logs".format(harbor_base_url), "GET", 200)
@@ -167,9 +169,9 @@ def call(self):
 # replication permissions start
 replication_policy_id = None
 replication_policy_name = "replication-policy-{}".format(random.randint(1000, 9999))
+result = urlsplit(harbor_base_url)
+endpoint_URL = "{}://{}".format(result.scheme, result.netloc)
 if resource == "replication":
-    result = urlsplit(harbor_base_url)
-    endpoint_URL = "{}://{}".format(result.scheme, result.netloc)
     replication_registry_payload = {
         "credential": {
             "access_key": admin_user_name,
@@ -225,6 +227,94 @@ def call(self):
 resource_permissions["replication"] = replication
 # replication permissions end
 
+# scan all permissions start
+scan_all_weekly_schedule_payload = {
+    "schedule": {
+        "type": "Weekly",
+        "cron": "0 0 0 * * 0"
+    }
+}
+scan_all_reset_schedule_payload = {
+    "schedule": {
+        "type": "None",
+        "cron": ""
+    }
+}
+create_scan_all_schedule = Permission("{}/system/scanAll/schedule".format(harbor_base_url), "POST", 201, scan_all_weekly_schedule_payload)
+update_scan_all_schedule = Permission("{}/system/scanAll/schedule".format(harbor_base_url), "PUT", 200, scan_all_reset_schedule_payload)
+stop_scan_all = Permission("{}/system/scanAll/stop".format(harbor_base_url), "POST", 202)
+scan_all_metrics = Permission("{}/scans/all/metrics".format(harbor_base_url), "GET", 200)
+scan_all_schedule_metrics = Permission("{}/scans/schedule/metrics".format(harbor_base_url), "GET", 200)
+scan_all = [ create_scan_all_schedule, update_scan_all_schedule, stop_scan_all, scan_all_metrics, scan_all_schedule_metrics ]
+resource_permissions["scan-all"] = scan_all
+# scan all permissions end
+
+# system volumes permissions start
+read_system_volumes = Permission("{}/systeminfo/volumes".format(harbor_base_url), "GET", 200)
+system_volumes = [ read_system_volumes ]
+resource_permissions["system-volumes"] = system_volumes
+# system volumes permissions end
+
+# jobservice monitor permissions start
+list_jobservice_pool = Permission("{}/jobservice/pools".format(harbor_base_url), "GET", 200)
+list_jobservice_pool_worker = Permission("{}/jobservice/pools/{}/workers".format(harbor_base_url, "88888888"), "GET", 200)
+stop_jobservice_job = Permission("{}/jobservice/jobs/{}".format(harbor_base_url, "88888888"), "PUT", 200)
+get_jobservice_job_log = Permission("{}/jobservice/jobs/{}/log".format(harbor_base_url, "88888888"), "GET", 500)
+list_jobservice_queue = Permission("{}/jobservice/queues".format(harbor_base_url), "GET", 200)
+stop_jobservice = Permission("{}/jobservice/queues/{}".format(harbor_base_url, "88888888"), "PUT", 200, payload={ "action": "stop" })
+jobservice_monitor = [ list_jobservice_pool, list_jobservice_pool_worker, stop_jobservice_job, get_jobservice_job_log, list_jobservice_queue, stop_jobservice ]
+resource_permissions["jobservice-monitor"] = jobservice_monitor
+# jobservice monitor permissions end
+
+# scanner permissions start
+scanner_payload = {
+    "name": "scanner-{}".format(random.randint(1000, 9999)),
+    "url": "https://{}".format(random.randint(1000, 9999)),
+    "description": None,
+    "auth": "",
+    "skip_certVerify": False,
+    "use_internal_addr": False
+}
+list_scanner = Permission("{}/scanners".format(harbor_base_url), "GET", 200)
+create_scanner = Permission("{}/scanners".format(harbor_base_url), "POST", 500, payload=scanner_payload)
+ping_scanner = Permission("{}/scanners/ping".format(harbor_base_url), "POST", 500, payload=scanner_payload)
+read_scanner = Permission("{}/scanners/{}".format(harbor_base_url, "88888888"), "GET", 404)
+update_scanner = Permission("{}/scanners/{}".format(harbor_base_url, "88888888"), "PUT", 404, payload=scanner_payload)
+delete_scanner = Permission("{}/scanners/{}".format(harbor_base_url, "88888888"), "DELETE", 404)
+set_default_scanner = Permission("{}/scanners/{}".format(harbor_base_url, "88888888"), "PATCH", 404, payload={ "is_default": True })
+get_scanner_metadata = Permission("{}/scanners/{}/metadata".format(harbor_base_url, "88888888"), "GET", 404)
+scanner = [ list_scanner, create_scanner, ping_scanner, read_scanner, update_scanner, delete_scanner, set_default_scanner, get_scanner_metadata ]
+resource_permissions["scanner"] = scanner
+# scanner permissions end
+
+# system label permissions start
+label_payload = {
+    "name": "label-{}".format(random.randint(1000, 9999)),
+    "description": "",
+    "color": "",
+    "scope": "g",
+    "project_id": 0
+}
+create_label = Permission("{}/labels".format(harbor_base_url), "POST", 201, label_payload, "id", id_from_header=True)
+read_label = Permission("{}/labels/{}".format(harbor_base_url, ID_PLACEHOLDER), "GET", 200, payload=label_payload, payload_id_field="id")
+update_label = Permission("{}/labels/{}".format(harbor_base_url, ID_PLACEHOLDER), "PUT", 200, payload=label_payload, payload_id_field="id")
+delete_label = Permission("{}/labels/{}".format(harbor_base_url, ID_PLACEHOLDER), "DELETE", 200, payload=label_payload, payload_id_field="id")
+label = [ create_label, read_label, update_label, delete_label ]
+resource_permissions["label"] = label
+# system label permissions end
+
+# security hub permissions start
+read_summary = Permission("{}/security/summary".format(harbor_base_url), "GET", 200)
+list_vul = Permission("{}/security/vul".format(harbor_base_url), "GET", 200)
+security_hub = [ read_summary, list_vul ]
+resource_permissions["security-hub"] = security_hub
+# security hub permissions end
+
+# catalog permissions start
+read_catalog = Permission("{}/v2/_catalog".format(endpoint_URL), "GET", 200)
+catalog = [ read_catalog ]
+resource_permissions["catalog"] = catalog
+# catalog permissions end
 
 
 def main():

From 969dd1be66ea53e8cf284e0769a895c0fb777601 Mon Sep 17 00:00:00 2001
From: Yang Jiao <72076317+YangJiao0817@users.noreply.github.com>
Date: Wed, 22 Nov 2023 18:13:44 +0800
Subject: [PATCH 6/8] Add notation accessory copy test case (#19605)

Fix #19546

Signed-off-by: Yang Jiao <jiaoya@vmware.com>
---
 tests/resources/Harbor-Pages/Project.robot    | 22 +++++-
 tests/resources/Notation_Util.robot           | 26 +++++++
 tests/resources/Util.robot                    |  1 +
 tests/robot-cases/Group1-Nightly/Common.robot | 17 +++--
 .../Group1-Nightly/Replication.robot          | 72 +++++++++----------
 5 files changed, 95 insertions(+), 43 deletions(-)
 create mode 100644 tests/resources/Notation_Util.robot

diff --git a/tests/resources/Harbor-Pages/Project.robot b/tests/resources/Harbor-Pages/Project.robot
index fe3a88586ea..62d5c84208c 100644
--- a/tests/resources/Harbor-Pages/Project.robot
+++ b/tests/resources/Harbor-Pages/Project.robot
@@ -375,13 +375,29 @@ Back Project Home
     [Arguments]  ${project_name}
     Retry Link Click  //a[contains(.,'${project_name}')]
 
-Should Not Be Signed By Cosign
+Should Be Signed
+    [Arguments]  ${tag}
+    Retry Wait Element Visible  //clr-dg-row[contains(.,'${tag}')]//clr-icon[contains(@class,'signed')]
+
+Should Not Be Signed
     [Arguments]  ${tag}
     Retry Wait Element Visible  //clr-dg-row[contains(.,'${tag}')]//clr-icon[contains(@class,'color-red')]
 
 Should Be Signed By Cosign
-    [Arguments]  ${tag}
-    Retry Wait Element Visible  //clr-dg-row[contains(.,'${tag}')]//clr-icon[contains(@class,'signed')]
+    [Arguments]  ${tag}=${null}  ${digest}=${null}
+    IF  '${tag}' != '${null}'
+        Retry Wait Element Visible  //clr-dg-row[./clr-expandable-animation/div/div/div/clr-dg-cell/div/clr-tooltip/div/div/span[contains(.,'${tag}')] and .//clr-dg-row[.//img[@title='signature.cosign']]]
+    ELSE
+        Retry Wait Element Visible  //clr-dg-row[./clr-expandable-animation/div/div/div/clr-dg-cell/div/a[contains(.,'${digest}')] and .//clr-dg-row[.//img[@title='signature.cosign']]]
+    END
+
+Should Be Signed By Notation
+    [Arguments]  ${tag}=${null}  ${digest}=${null}
+    IF  '${tag}' != '${null}'
+        Retry Wait Element Visible  //clr-dg-row[./clr-expandable-animation/div/div/div/clr-dg-cell/div/clr-tooltip/div/div/span[contains(.,'${tag}')] and .//clr-dg-row[.//img[@title='signature.notation']]]
+    ELSE
+        Retry Wait Element Visible  //clr-dg-row[./clr-expandable-animation/div/div/div/clr-dg-cell/div/a[contains(.,'${digest}')] and .//clr-dg-row[.//img[@title='signature.notation']]]
+    END
 
 Delete Accessory
     [Arguments]  ${tag}
diff --git a/tests/resources/Notation_Util.robot b/tests/resources/Notation_Util.robot
new file mode 100644
index 00000000000..ac5f6e6c134
--- /dev/null
+++ b/tests/resources/Notation_Util.robot
@@ -0,0 +1,26 @@
+# Copyright Project Harbor Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#	http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License
+
+*** Settings ***
+Documentation  This resource provides helper functions for notation operations
+Library  OperatingSystem
+Library  Process
+
+*** Keywords ***
+Notation Generate Cert
+    Run And Return Rc And Output  notation cert generate-test --default wabbit-networks.io
+
+Notation Sign
+    [Arguments]  ${artifact}
+    Wait Unitl Command Success  notation sign -d --allow-referrers-api ${artifact}
diff --git a/tests/resources/Util.robot b/tests/resources/Util.robot
index 17c86a9d25f..b368e9dbbe0 100644
--- a/tests/resources/Util.robot
+++ b/tests/resources/Util.robot
@@ -82,6 +82,7 @@ Resource  SeleniumUtil.robot
 Resource  Nightly-Util.robot
 Resource  APITest-Util.robot
 Resource  Cosign_Util.robot
+Resource  Notation_Util.robot
 Resource  Imgpkg-Util.robot
 Resource  Webhook-Util.robot
 Resource  TestCaseBody.robot
diff --git a/tests/robot-cases/Group1-Nightly/Common.robot b/tests/robot-cases/Group1-Nightly/Common.robot
index 6b5246db432..63e1c9e516e 100644
--- a/tests/robot-cases/Group1-Nightly/Common.robot
+++ b/tests/robot-cases/Group1-Nightly/Common.robot
@@ -474,19 +474,28 @@ Test Case - Copy A Image And Accessory
     Create An New Project And Go Into Project  ${source_project}
 
     Push Image With Tag  ${ip}  ${user}  ${pwd}  ${source_project}  ${image}  ${tag}
-    Cosign Generate Key Pair
     Docker Login  ${ip}  ${user}  ${pwd}
+    Cosign Generate Key Pair
     Cosign Sign  ${ip}/${source_project}/${image}:${tag}
-    Docker Logout  ${ip}
+    Notation Generate Cert
+    Notation Sign  ${ip}/${source_project}/${image}:${tag}
+
     Go Into Repo  ${source_project}  ${image}
+    Should Be Signed  ${tag}
+    Retry Button Click  ${artifact_list_accessory_btn}
     Should Be Signed By Cosign  ${tag}
+    Should Be Signed By Notation  ${tag}
 
     Copy Image  ${tag}  ${target_project}  ${image}
 
     Retry Double Keywords When Error  Go Into Project  ${target_project}  Retry Wait Until Page Contains  ${image}
     Go Into Repo  ${target_project}  ${image}
     Retry Wait Until Page Contains Element  //clr-dg-row[contains(.,${tag})]
+    Should Be Signed  ${tag}
+    Retry Button Click  ${artifact_list_accessory_btn}
     Should Be Signed By Cosign  ${tag}
+    Should Be Signed By Notation  ${tag}
+    Docker Logout  ${ip}
     Close Browser
 
 Test Case - Create An New Project With Quotas Set
@@ -772,14 +781,14 @@ Test Case - Cosign And Cosign Deployment Security Policy
     Push Image With Tag  ${ip}  ${user}  ${pwd}  project${d}  ${image}  ${tag}
     Go Into Project  project${d}
     Go Into Repo  project${d}  ${image}
-    Should Not Be Signed By Cosign  ${tag}
+    Should Not Be Signed  ${tag}
     Cannot Pull Image  ${ip}  ${user}  ${pwd}  project${d}  ${image}:${tag}  err_msg=The image is not signed by cosign.
     Cosign Generate Key Pair
     Cosign Verify  ${ip}/project${d}/${image}:${tag}  ${false}
 
     Cosign Sign  ${ip}/project${d}/${image}:${tag}
     Cosign Verify  ${ip}/project${d}/${image}:${tag}  ${true}
-    Retry Double Keywords When Error  Retry Element Click  ${artifact_list_refresh_btn}  Should Be Signed By Cosign  ${tag}
+    Retry Double Keywords When Error  Retry Element Click  ${artifact_list_refresh_btn}  Should Be Signed  ${tag}
     Pull image  ${ip}  ${user}  ${pwd}  project${d}  ${image}:${tag}
 
     Retry Double Keywords When Error  Delete Accessory  ${tag}  Should be Accessory deleted  ${tag}
diff --git a/tests/robot-cases/Group1-Nightly/Replication.robot b/tests/robot-cases/Group1-Nightly/Replication.robot
index 1b1ec7e10b0..0380432b75b 100644
--- a/tests/robot-cases/Group1-Nightly/Replication.robot
+++ b/tests/robot-cases/Group1-Nightly/Replication.robot
@@ -389,16 +389,16 @@ Test Case - Robot Account Do Replication
     Logout Harbor
     Sign In Harbor  https://${ip1}  ${HARBOR_ADMIN}  ${HARBOR_PASSWORD}
     Image Should Be Replicated To Project  project_dest${d}  ${image1}
-    Should Be Signed By Cosign  ${tag1}
+    Should Be Signed  ${tag1}
     Image Should Be Replicated To Project  project_dest${d}  ${image2}
-    Should Be Signed By Cosign  ${tag2}
+    Should Be Signed  ${tag2}
     Back Project Home  project_dest${d}
     Go Into Repo  project_dest${d}  ${index}
-    Should Be Signed By Cosign  ${index_tag}
+    Should Be Signed  ${index_tag}
     Go Into Repo  project_dest${d}  ${index}
-    Retry Double Keywords When Error  Click Index Achieve  ${index_tag}  Should Be Signed By Cosign  ${image1_short_sha256}
+    Retry Double Keywords When Error  Click Index Achieve  ${index_tag}  Should Be Signed  ${image1_short_sha256}
     Go Into Repo  project_dest${d}  ${index}
-    Retry Double Keywords When Error  Click Index Achieve  ${index_tag}  Should Not Be Signed By Cosign  ${image2_short_sha256}
+    Retry Double Keywords When Error  Click Index Achieve  ${index_tag}  Should Not Be Signed  ${image2_short_sha256}
     # pull mode
     Logout Harbor
     Sign In Harbor  ${HARBOR_URL}  ${HARBOR_ADMIN}  ${HARBOR_PASSWORD}
@@ -409,16 +409,16 @@ Test Case - Robot Account Do Replication
     Check Latest Replication Job Status  Succeeded
     Check Latest Replication Enabled Copy By Chunk
     Image Should Be Replicated To Project  project_dest${d}  ${image1}
-    Should Be Signed By Cosign  ${tag1}
+    Should Be Signed  ${tag1}
     Image Should Be Replicated To Project  project_dest${d}  ${image2}
-    Should Be Signed By Cosign  ${tag2}
+    Should Be Signed  ${tag2}
     Back Project Home  project_dest${d}
     Go Into Repo  project_dest${d}  ${index}
-    Should Be Signed By Cosign  ${index_tag}
+    Should Be Signed  ${index_tag}
     Go Into Repo  project_dest${d}  ${index}
-    Retry Double Keywords When Error  Click Index Achieve  ${index_tag}  Should Be Signed By Cosign  ${image1_short_sha256}
+    Retry Double Keywords When Error  Click Index Achieve  ${index_tag}  Should Be Signed  ${image1_short_sha256}
     Go Into Repo  project_dest${d}  ${index}
-    Retry Double Keywords When Error  Click Index Achieve  ${index_tag}  Should Not Be Signed By Cosign  ${image2_short_sha256}
+    Retry Double Keywords When Error  Click Index Achieve  ${index_tag}  Should Not Be Signed  ${image2_short_sha256}
     Close Browser
 
 Test Case - Replication Triggered By Events
@@ -468,28 +468,28 @@ Test Case - Replication Triggered By Events
     Logout Harbor
     Sign In Harbor  ${HARBOR_URL}  ${HARBOR_ADMIN}  ${HARBOR_PASSWORD}
     Go Into Repo  project${d}  ${image1}
-    Should Be Signed By Cosign  ${tag1}
+    Should Be Signed  ${tag1}
     Go Into Repo  project${d}  ${index}
-    Should Be Signed By Cosign  ${index_tag}
+    Should Be Signed  ${index_tag}
     Go Into Repo  project${d}  ${index}
-    Retry Double Keywords When Error  Click Index Achieve  ${index_tag}  Should Be Signed By Cosign  ${image1_short_sha256}
+    Retry Double Keywords When Error  Click Index Achieve  ${index_tag}  Should Be Signed  ${image1_short_sha256}
     Go Into Repo  project${d}  ${image2}
-    Should Not Be Signed By Cosign  ${tag2}
+    Should Not Be Signed  ${tag2}
     Go Into Repo  project${d}  ${index}
-    Retry Double Keywords When Error  Click Index Achieve  ${index_tag}  Should Not Be Signed By Cosign  ${image2_short_sha256}
+    Retry Double Keywords When Error  Click Index Achieve  ${index_tag}  Should Not Be Signed  ${image2_short_sha256}
     Logout Harbor
 
     Sign In Harbor  https://${ip1}  ${HARBOR_ADMIN}  ${HARBOR_PASSWORD}
     Go Into Repo  project_dest${d}  ${image1}
-    Should Be Signed By Cosign  ${tag1}
+    Should Be Signed  ${tag1}
     Go Into Repo  project_dest${d}  ${index}
-    Should Be Signed By Cosign  ${index_tag}
+    Should Be Signed  ${index_tag}
     Go Into Repo  project_dest${d}  ${index}
-    Retry Double Keywords When Error  Click Index Achieve  ${index_tag}  Should Be Signed By Cosign  ${image1_short_sha256}
+    Retry Double Keywords When Error  Click Index Achieve  ${index_tag}  Should Be Signed  ${image1_short_sha256}
     Go Into Repo  project_dest${d}  ${image2}
-    Should Not Be Signed By Cosign  ${tag2}
+    Should Not Be Signed  ${tag2}
     Go Into Repo  project_dest${d}  ${index}
-    Retry Double Keywords When Error  Click Index Achieve  ${index_tag}  Should Not Be Signed By Cosign  ${image2_short_sha256}
+    Retry Double Keywords When Error  Click Index Achieve  ${index_tag}  Should Not Be Signed  ${image2_short_sha256}
     Logout Harbor
     # delete
     Sign In Harbor  ${HARBOR_URL}  ${HARBOR_ADMIN}  ${HARBOR_PASSWORD}
@@ -498,13 +498,13 @@ Test Case - Replication Triggered By Events
     Repo Not Exist  project${d}  ${image2}
     Go Into Repo  project${d}  ${image1}
     Retry Double Keywords When Error  Delete Accessory  ${tag1}  Should be Accessory deleted  ${tag1}
-    Should Not Be Signed By Cosign  ${tag1}
+    Should Not Be Signed  ${tag1}
     Go Into Repo  project${d}  ${index}
     Retry Double Keywords When Error  Delete Accessory  ${index_tag}  Should be Accessory deleted  ${index_tag}
-    Should Not Be Signed By Cosign  ${index_tag}
+    Should Not Be Signed  ${index_tag}
     Click Index Achieve  ${index_tag}
     Retry Double Keywords When Error  Delete Accessory  ${image1_short_sha256}  Should be Accessory deleted  ${image1_short_sha256}
-    Should Not Be Signed By Cosign  ${image1_short_sha256}
+    Should Not Be Signed  ${image1_short_sha256}
     Logout Harbor
 
     Sign In Harbor  https://${ip1}  ${HARBOR_ADMIN}  ${HARBOR_PASSWORD}
@@ -512,12 +512,12 @@ Test Case - Replication Triggered By Events
     Wait Until Page Contains  We couldn't find any artifacts!
     Go Into Repo  project_dest${d}  ${image1}
     Should be Accessory deleted  ${tag1}
-    Should Not Be Signed By Cosign  ${tag1}
+    Should Not Be Signed  ${tag1}
     Go Into Repo  project_dest${d}  ${index}
     Should be Accessory deleted  ${index_tag}
-    Should Not Be Signed By Cosign  ${index_tag}
+    Should Not Be Signed  ${index_tag}
     Retry Double Keywords When Error  Click Index Achieve  ${index_tag}  Should be Accessory deleted  ${image1_short_sha256}
-    Should Not Be Signed By Cosign  ${image1_short_sha256}
+    Should Not Be Signed  ${image1_short_sha256}
     Close Browser
 
 Test Case - Enable Replication Of Cosign Deployment Security Policy
@@ -595,15 +595,15 @@ Test Case - Enable Replication Of Cosign Deployment Security Policy
     Repo Exist  project_pull_dest${d}  ${image2}
     Repo Exist  project_pull_dest${d}  ${index}
     Go Into Repo  project_pull_dest${d}  ${image1}
-    Should Be Signed By Cosign  ${tag1}
+    Should Be Signed  ${tag1}
     Go Into Repo  project_pull_dest${d}  ${image2}
-    Should Be Signed By Cosign  ${tag2}
+    Should Be Signed  ${tag2}
     Go Into Repo  project_pull_dest${d}  ${index}
-    Should Be Signed By Cosign  ${index_tag}
+    Should Be Signed  ${index_tag}
     Go Into Repo  project_pull_dest${d}  ${index}
-    Retry Double Keywords When Error  Click Index Achieve  ${index_tag}  Should Be Signed By Cosign  ${image1_short_sha256}
+    Retry Double Keywords When Error  Click Index Achieve  ${index_tag}  Should Be Signed  ${image1_short_sha256}
     Go Into Repo  project_pull_dest${d}  ${index}
-    Retry Double Keywords When Error  Click Index Achieve  ${index_tag}  Should Be Signed By Cosign  ${image2_short_sha256}
+    Retry Double Keywords When Error  Click Index Achieve  ${index_tag}  Should Be Signed  ${image2_short_sha256}
     # check project_push_dest
     Go Into Project  project_push_dest${d}
     Switch To Project Repo
@@ -611,15 +611,15 @@ Test Case - Enable Replication Of Cosign Deployment Security Policy
     Repo Exist  project_push_dest${d}  ${image2}
     Repo Exist  project_push_dest${d}  ${index}
     Go Into Repo  project_push_dest${d}  ${image1}
-    Should Be Signed By Cosign  ${tag1}
+    Should Be Signed  ${tag1}
     Go Into Repo  project_push_dest${d}  ${image2}
-    Should Be Signed By Cosign  ${tag2}
+    Should Be Signed  ${tag2}
     Go Into Repo  project_push_dest${d}  ${index}
-    Should Be Signed By Cosign  ${index_tag}
+    Should Be Signed  ${index_tag}
     Go Into Repo  project_push_dest${d}  ${index}
-    Retry Double Keywords When Error  Click Index Achieve  ${index_tag}  Should Be Signed By Cosign  ${image1_short_sha256}
+    Retry Double Keywords When Error  Click Index Achieve  ${index_tag}  Should Be Signed  ${image1_short_sha256}
     Go Into Repo  project_push_dest${d}  ${index}
-    Retry Double Keywords When Error  Click Index Achieve  ${index_tag}  Should Be Signed By Cosign  ${image2_short_sha256}
+    Retry Double Keywords When Error  Click Index Achieve  ${index_tag}  Should Be Signed  ${image2_short_sha256}
     Close Browser
 
 Test Case - Carvel Imgpkg Copy To Harbor

From e6900301ce1eef429964492273c2f4ab06f4fb7d Mon Sep 17 00:00:00 2001
From: Wang Yan <wangyan@vmware.com>
Date: Mon, 27 Nov 2023 13:34:17 +0800
Subject: [PATCH 7/8] fix system label resource (#19621)

Make sure a robot account can use rbac.ResourceLabel to access system-level labels

Signed-off-by: wang yan <wangyan@vmware.com>
---
 src/server/v2.0/handler/label.go | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/src/server/v2.0/handler/label.go b/src/server/v2.0/handler/label.go
index 67a71d41f3c..2c9299cbf6d 100644
--- a/src/server/v2.0/handler/label.go
+++ b/src/server/v2.0/handler/label.go
@@ -23,7 +23,6 @@ import (
 
 	"github.com/goharbor/harbor/src/common"
 	"github.com/goharbor/harbor/src/common/rbac"
-	"github.com/goharbor/harbor/src/common/rbac/system"
 	"github.com/goharbor/harbor/src/controller/project"
 	"github.com/goharbor/harbor/src/lib"
 	"github.com/goharbor/harbor/src/lib/errors"
@@ -193,8 +192,7 @@ func (lAPI *labelAPI) DeleteLabel(ctx context.Context, params operation.DeleteLa
 func (lAPI *labelAPI) requireAccess(ctx context.Context, label *pkg_model.Label, action rbac.Action, subresources ...rbac.Resource) error {
 	switch label.Scope {
 	case common.LabelScopeGlobal:
-		resource := system.NewNamespace().Resource(rbac.ResourceLabel)
-		return lAPI.RequireSystemAccess(ctx, action, resource)
+		return lAPI.RequireSystemAccess(ctx, action, rbac.ResourceLabel)
 	case common.LabelScopeProject:
 		if len(subresources) == 0 {
 			subresources = append(subresources, rbac.ResourceLabel)

From 4fac10a97dbd573d28366c1427e7574d090c3dc6 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Mon, 27 Nov 2023 15:48:21 +0800
Subject: [PATCH 8/8] Bump actions/setup-node from 3 to 4 (#19478)

Bumps [actions/setup-node](https://github.com/actions/setup-node) from 3 to 4.
- [Release notes](https://github.com/actions/setup-node/releases)
- [Commits](https://github.com/actions/setup-node/compare/v3...v4)

---
updated-dependencies:
- dependency-name: actions/setup-node
  dependency-type: direct:production
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Orlix <7236111+OrlinVasilev@users.noreply.github.com>
Co-authored-by: Wang Yan <wangyan@vmware.com>
---
 .github/workflows/CI.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/CI.yml b/.github/workflows/CI.yml
index 0d367ec8764..2f151df9c4a 100644
--- a/.github/workflows/CI.yml
+++ b/.github/workflows/CI.yml
@@ -317,7 +317,7 @@ jobs:
       - ubuntu-latest
     timeout-minutes: 100
     steps:
-      - uses: actions/setup-node@v3
+      - uses: actions/setup-node@v4
         with:
           node-version: '18'
       - uses: actions/checkout@v3