From fe9571e01deba19093a999a798ffcbded9129480 Mon Sep 17 00:00:00 2001
From: Abhishek Singh <abhiandthetruth@gmail.com>
Date: Wed, 2 Nov 2022 02:28:21 +0530
Subject: [PATCH 1/2] feat: add remote project import from AWS S3

---
 docs/gl_objects/projects.rst                  | 14 +++++
 gitlab/v4/objects/projects.py                 | 59 +++++++++++++++++++
 tests/functional/api/test_import_export.py    | 20 +++++++
 .../objects/test_project_import_export.py     | 43 +++++++++++++-
 4 files changed, 135 insertions(+), 1 deletion(-)

diff --git a/docs/gl_objects/projects.rst b/docs/gl_objects/projects.rst
index 231aa50a0..23f3c44c2 100644
--- a/docs/gl_objects/projects.rst
+++ b/docs/gl_objects/projects.rst
@@ -352,6 +352,20 @@ Import the project using file stored on a remote URL::
         override_params={'visibility': 'private'},
     )
 
+Import the project using a file stored on AWS S3::
+
+    output = gl.projects.remote_import_s3(
+        region="aws-region",
+        bucket_name="aws-bucket-name",
+        file_key="aws-file-key",
+        access_key_id="aws-access-key-id",
+        secret_access_key="secret-access-key",
+        path="my_new_remote_project",
+        name="My New Remote Project", 
+        namespace="my-group",
+        override_params={'visibility': 'private'},
+    )
+
 Project custom attributes
 =========================
 
diff --git a/gitlab/v4/objects/projects.py b/gitlab/v4/objects/projects.py
index 446e4f539..201a08cc4 100644
--- a/gitlab/v4/objects/projects.py
+++ b/gitlab/v4/objects/projects.py
@@ -894,6 +894,65 @@ def remote_import(
             "/projects/remote-import", post_data=data, **kwargs
         )
 
+    def remote_import_s3(
+        self,
+        region: str,
+        bucket_name: str,
+        file_key: str,
+        access_key_id: str,
+        secret_access_key: str,
+        path: str,
+        name: Optional[str] = None,
+        namespace: Optional[str] = None,
+        overwrite: bool = False,
+        override_params: Optional[Dict[str, Any]] = None,
+        **kwargs: Any,
+    ) -> Union[Dict[str, Any], requests.Response]:
+        """Import a project from an archive file stored on AWS S3.
+
+        Args:
+            region: AWS S3 region name where the file is stored
+            bucket_name: AWS S3 bucket name where the file is stored
+            file_key: AWS S3 file key identifying the file
+            access_key_id: AWS S3 access key ID
+            secret_access_key: AWS S3 secret access key
+            path: Name and path for the new project
+            name: The name of the project to import. If not provided,
+                defaults to the path of the project.
+            namespace: The ID or path of the namespace that the project
+                will be imported to
+            overwrite: If True overwrite an existing project with the
+                same path
+            override_params: Set the specific settings for the project
+            **kwargs: Extra options to send to the server (e.g. sudo)
+
+        Raises:
+            GitlabAuthenticationError: If authentication is not correct
+            GitlabListError: If the server failed to perform the request
+
+        Returns:
+            A representation of the import status.
+        """
+        data = {
+            "region": region,
+            "bucket_name": bucket_name,
+            "file_key": file_key,
+            "access_key_id": access_key_id,
+            "secret_access_key": secret_access_key,
+            "path": path,
+            "overwrite": str(overwrite),
+        }
+        if override_params:
+            for k, v in override_params.items():
+                data[f"override_params[{k}]"] = v
+        if name is not None:
+            data["name"] = name
+        if namespace:
+            data["namespace"] = namespace
+        return self.gitlab.http_post(
+            "/projects/remote-import-s3", post_data=data, **kwargs
+        )
+
     def import_bitbucket_server(
         self,
         bitbucket_server_url: str,
diff --git a/tests/functional/api/test_import_export.py b/tests/functional/api/test_import_export.py
index 83cccc69e..ff97205ee 100644
--- a/tests/functional/api/test_import_export.py
+++ b/tests/functional/api/test_import_export.py
@@ -78,3 +78,23 @@ def test_project_remote_import(gl):
         "File url is blocked: Only allowed schemes are https"
         in err_info.value.error_message
     )
+
+
+def test_project_remote_import_s3(gl):
+    gl.features.set("import_project_from_remote_file_s3", True)
+    with pytest.raises(gitlab.exceptions.GitlabHttpError) as err_info:
+        gl.projects.remote_import_s3(
+            "aws-region",
+            "aws-bucket-name",
+            "aws-file-key",
+            "aws-access-key-id",
+            "secret-access-key",
+            "remote-project",
+            "remote-project",
+            "root",
+        )
+    assert err_info.value.response_code == 400
+    assert (
+        "Failed to open 'aws-file-key' in 'aws-bucket-name'"
+        in err_info.value.error_message
+    )
diff --git a/tests/unit/objects/test_project_import_export.py b/tests/unit/objects/test_project_import_export.py
index 72321ec68..e5bbf78f4 100644
--- a/tests/unit/objects/test_project_import_export.py
+++ b/tests/unit/objects/test_project_import_export.py
@@ -53,6 +53,30 @@ def resp_remote_import():
         yield rsps
 
 
+@pytest.fixture
+def resp_remote_import_s3():
+    content = {
+        "id": 1,
+        "description": None,
+        "name": "remote-project-s3",
+        "name_with_namespace": "Administrator / remote-project-s3",
+        "path": "remote-project-s3",
+        "path_with_namespace": "root/remote-project-s3",
+        "created_at": "2018-02-13T09:05:58.023Z",
+        "import_status": "scheduled",
+    }
+
+    with responses.RequestsMock() as rsps:
+        rsps.add(
+            method=responses.POST,
+            url="http://localhost/api/v4/projects/remote-import-s3",
+            json=content,
+            content_type="application/json",
+            status=200,
+        )
+        yield rsps
+
+
 @pytest.fixture
 def resp_import_status():
     content = {
@@ -125,7 +149,24 @@ def test_import_project(gl, resp_import_project):
 
 def test_remote_import(gl, resp_remote_import):
     project_import = gl.projects.remote_import(
-        "https://whatever.com/url", "remote-project", "remote-project", "root"
+        "https://whatever.com/url/file.tar.gz",
+        "remote-project",
+        "remote-project",
+        "root",
+    )
+    assert project_import["import_status"] == "scheduled"
+
+
+def test_remote_import_s3(gl, resp_remote_import_s3):
+    project_import = gl.projects.remote_import_s3(
+        "aws-region",
+        "aws-bucket-name",
+        "aws-file-key",
+        "aws-access-key-id",
+        "secret-access-key",
+        "remote-project",
+        "remote-project",
+        "root",
     )
     assert project_import["import_status"] == "scheduled"
 

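Note: the method added above is a thin wrapper around the ``POST
/projects/remote-import-s3`` endpoint. A minimal sketch of the equivalent raw
request it builds, assuming a placeholder instance URL and private token
(neither is part of this patch)::

    import requests

    # Same form fields that remote_import_s3() collects into its `data` dict.
    payload = {
        "region": "aws-region",
        "bucket_name": "aws-bucket-name",
        "file_key": "aws-file-key",
        "access_key_id": "aws-access-key-id",
        "secret_access_key": "secret-access-key",
        "path": "my_new_remote_project",
        "overwrite": "False",
    }
    response = requests.post(
        "https://gitlab.example.com/api/v4/projects/remote-import-s3",
        headers={"PRIVATE-TOKEN": "<your-token>"},
        data=payload,
        timeout=30,
    )
    response.raise_for_status()
    print(response.json()["import_status"])  # "scheduled" on success
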
From c0f8b01b57ce57b5bcc9848ddd2dfba98fb38b05 Mon Sep 17 00:00:00 2001
From: Abhishek Singh <abhiandthetruth@gmail.com>
Date: Wed, 2 Nov 2022 12:47:16 +0530
Subject: [PATCH 2/2] fix: reorder remote_import_s3 params and raise GitlabImportError

---
 docs/gl_objects/projects.rst                     |  2 +-
 gitlab/v4/objects/projects.py                    | 11 +++++++----
 tests/functional/api/test_import_export.py       |  6 +++---
 tests/unit/objects/test_project_import_export.py |  2 +-
 4 files changed, 12 insertions(+), 9 deletions(-)

diff --git a/docs/gl_objects/projects.rst b/docs/gl_objects/projects.rst
index 23f3c44c2..4b6662b20 100644
--- a/docs/gl_objects/projects.rst
+++ b/docs/gl_objects/projects.rst
@@ -355,12 +355,12 @@ Import the project using file stored on a remote URL::
 Import the project using a file stored on AWS S3::
 
     output = gl.projects.remote_import_s3(
+        path="my_new_remote_project",
         region="aws-region",
         bucket_name="aws-bucket-name",
         file_key="aws-file-key",
         access_key_id="aws-access-key-id",
         secret_access_key="secret-access-key",
-        path="my_new_remote_project",
         name="My New Remote Project", 
         namespace="my-group",
         override_params={'visibility': 'private'},
diff --git a/gitlab/v4/objects/projects.py b/gitlab/v4/objects/projects.py
index 201a08cc4..65be58174 100644
--- a/gitlab/v4/objects/projects.py
+++ b/gitlab/v4/objects/projects.py
@@ -807,6 +807,7 @@ class ProjectManager(CRUDMixin, RESTManager):
     def get(self, id: Union[str, int], lazy: bool = False, **kwargs: Any) -> Project:
         return cast(Project, super().get(id=id, lazy=lazy, **kwargs))
 
+    @exc.on_http_error(exc.GitlabImportError)
     def import_project(
         self,
         file: str,
@@ -833,7 +834,7 @@ def import_project(
 
         Raises:
             GitlabAuthenticationError: If authentication is not correct
-            GitlabListError: If the server failed to perform the request
+            GitlabImportError: If the server failed to perform the request
 
         Returns:
             A representation of the import status.
@@ -851,6 +852,7 @@ def import_project(
             "/projects/import", post_data=data, files=files, **kwargs
         )
 
+    @exc.on_http_error(exc.GitlabImportError)
     def remote_import(
         self,
         url: str,
@@ -877,7 +879,7 @@ def remote_import(
 
         Raises:
             GitlabAuthenticationError: If authentication is not correct
-            GitlabListError: If the server failed to perform the request
+            GitlabImportError: If the server failed to perform the request
 
         Returns:
             A representation of the import status.
@@ -894,14 +896,15 @@ def remote_import(
             "/projects/remote-import", post_data=data, **kwargs
         )
 
+    @exc.on_http_error(exc.GitlabImportError)
     def remote_import_s3(
         self,
+        path: str,
         region: str,
         bucket_name: str,
         file_key: str,
         access_key_id: str,
         secret_access_key: str,
-        path: str,
         name: Optional[str] = None,
         namespace: Optional[str] = None,
         overwrite: bool = False,
@@ -928,7 +931,7 @@ def remote_import_s3(
 
         Raises:
             GitlabAuthenticationError: If authentication is not correct
-            GitlabListError: If the server failed to perform the request
+            GitlabImportError: If the server failed to perform the request
 
         Returns:
             A representation of the import status.
diff --git a/tests/functional/api/test_import_export.py b/tests/functional/api/test_import_export.py
index ff97205ee..8f9db9c60 100644
--- a/tests/functional/api/test_import_export.py
+++ b/tests/functional/api/test_import_export.py
@@ -69,7 +69,7 @@ def test_project_import_export(gl, project, temp_dir):
 
 
 def test_project_remote_import(gl):
-    with pytest.raises(gitlab.exceptions.GitlabHttpError) as err_info:
+    with pytest.raises(gitlab.exceptions.GitlabImportError) as err_info:
         gl.projects.remote_import(
             "ftp://whatever.com/url", "remote-project", "remote-project", "root"
         )
@@ -82,15 +82,15 @@ def test_project_remote_import(gl):
 
 def test_project_remote_import_s3(gl):
     gl.features.set("import_project_from_remote_file_s3", True)
-    with pytest.raises(gitlab.exceptions.GitlabHttpError) as err_info:
+    with pytest.raises(gitlab.exceptions.GitlabImportError) as err_info:
         gl.projects.remote_import_s3(
+            "remote-project",
             "aws-region",
             "aws-bucket-name",
             "aws-file-key",
             "aws-access-key-id",
             "secret-access-key",
             "remote-project",
-            "remote-project",
             "root",
         )
     assert err_info.value.response_code == 400
diff --git a/tests/unit/objects/test_project_import_export.py b/tests/unit/objects/test_project_import_export.py
index e5bbf78f4..bfe976fe8 100644
--- a/tests/unit/objects/test_project_import_export.py
+++ b/tests/unit/objects/test_project_import_export.py
@@ -159,13 +159,13 @@ def test_remote_import(gl, resp_remote_import):
 
 def test_remote_import_s3(gl, resp_remote_import_s3):
     project_import = gl.projects.remote_import_s3(
+        "remote-project",
         "aws-region",
         "aws-bucket-name",
         "aws-file-key",
         "aws-access-key-id",
         "secret-access-key",
         "remote-project",
-        "remote-project",
         "root",
     )
     assert project_import["import_status"] == "scheduled"
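
With the second patch applied, ``path`` comes first, so an end-to-end call with
the final signature looks roughly like the following. The polling loop mirrors
the pattern already documented for ``import_project``; the S3 values and the
instance URL are placeholders, not part of this series::

    import time

    import gitlab

    gl = gitlab.Gitlab("https://gitlab.example.com", private_token="<your-token>")

    output = gl.projects.remote_import_s3(
        path="my_new_remote_project",
        region="aws-region",
        bucket_name="aws-bucket-name",
        file_key="aws-file-key",
        access_key_id="aws-access-key-id",
        secret_access_key="secret-access-key",
        name="My New Remote Project",
        namespace="my-group",
    )

    # Poll the import status until the scheduled import has finished.
    project_import = gl.projects.get(output["id"], lazy=True).imports.get()
    while project_import.import_status != "finished":
        time.sleep(1)
        project_import.refresh()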