diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml
new file mode 100644
index 0000000..e2fb354
--- /dev/null
+++ b/.github/workflows/deploy.yml
@@ -0,0 +1,24 @@
+# A single CI workflow to build wheels and deploy the package
+name: Build wheels and deploy
+
+on:
+  workflow_dispatch:
+  push:
+  merge_group:
+  release:
+    types:
+      - published
+
+jobs:
+  build:
+    strategy:
+      matrix:
+        os: [ubuntu-latest, macos-latest]
+        python-version: [3.9, '3.10', '3.11']
+    uses: qiboteam/workflows/.github/workflows/deploy-pip-poetry.yml@main
+    with:
+      os: ${{ matrix.os }}
+      python-version: ${{ matrix.python-version }}
+      publish: ${{ github.event_name == 'release' && github.event.action == 'published' && matrix.os == 'ubuntu-latest' && matrix.python-version == '3.10' }}
+      poetry-extras: "--with dev"
+    secrets: inherit
diff --git a/.github/workflows/unit_test.yml b/.github/workflows/unit_test.yml
new file mode 100644
index 0000000..8e424f7
--- /dev/null
+++ b/.github/workflows/unit_test.yml
@@ -0,0 +1,22 @@
+name: Tests
+
+on:
+  workflow_dispatch:
+  push:
+  merge_group:
+  pull_request:
+    types: [labeled]
+
+jobs:
+  build:
+    if: contains(github.event.pull_request.labels.*.name, 'run-workflow') || github.event_name == 'push'
+    strategy:
+      matrix:
+        os: [ubuntu-latest, macos-latest]
+        python-version: [3.9, '3.10', '3.11']
+    uses: qiboteam/workflows/.github/workflows/rules-poetry.yml@main
+    with:
+      os: ${{ matrix.os }}
+      python-version: ${{ matrix.python-version }}
+      poetry-extras: "--with dev"
+    secrets: inherit
diff --git a/.gitignore b/.gitignore
index 66614b1..de63fdd 100644
--- a/.gitignore
+++ b/.gitignore
@@ -164,4 +164,4 @@ examples/token.txt
 examples/results/
 
 # configuration variables
-src/tiiq_provider/.env
+src/tii_qrc_provider/.env
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
new file mode 100644
index 0000000..2d641a9
--- /dev/null
+++ b/.pre-commit-config.yaml
@@ -0,0 +1,28 @@
+ci:
+  autofix_prs: true
+repos:
+  - repo: https://github.com/pre-commit/pre-commit-hooks
+    rev: v4.5.0
+    hooks:
+      - id: trailing-whitespace
+      - id: end-of-file-fixer
+      - id: check-yaml
+      - id: debug-statements
+  - repo: https://github.com/psf/black
+    rev: 23.11.0
+    hooks:
+      - id: black
+  - repo: https://github.com/pycqa/isort
+    rev: 5.12.0
+    hooks:
+      - id: isort
+        args: ["--profile", "black"]
+  - repo: https://github.com/asottile/pyupgrade
+    rev: v3.15.0
+    hooks:
+      - id: pyupgrade
+  - repo: https://github.com/hadialqattan/pycln
+    rev: v2.4.0
+    hooks:
+      - id: pycln
+        args: [--config=pyproject.toml]
diff --git a/README.md b/README.md
index b0db89d..dfa110a 100644
--- a/README.md
+++ b/README.md
@@ -29,7 +29,7 @@ Please, sign up to [this link](http://http://login.qrccluster.com:8010/) to
 obtain the needed token to run computations on the cluster.
 
 The following snippet provides a basic usage example.
-Replace the `your-tiiq-token` string with your user token received during the
+Replace the `your-tii-qrc-token` string with your user token received during the
 registration process.
 
```python @@ -40,7 +40,7 @@ from qibo_tii_provider import TIIProvider circuit = qibo.models.QFT(5) # authenticate to server through the client instance -token = "your-tiiq-token" +token = "your-tii-qrc-token" client = TIIProvider(token) # run the circuit diff --git a/examples/run_error_job.py b/examples/run_error_job.py index c033fce..5f28689 100644 --- a/examples/run_error_job.py +++ b/examples/run_error_job.py @@ -1,11 +1,12 @@ import qibo + from qibo_tii_provider import TiiProvider # create the circuit you want to run circuit = qibo.models.QFT(11) # read the token from file -with open("token.txt", "r") as f: +with open("token.txt") as f: token = f.read() # authenticate to server through the client instance diff --git a/examples/run_successful_job.py b/examples/run_successful_job.py index 8efe925..0f79dff 100644 --- a/examples/run_successful_job.py +++ b/examples/run_successful_job.py @@ -1,11 +1,12 @@ import qibo + from qibo_tii_provider import TIIProvider # create the circuit you want to run circuit = qibo.models.QFT(5) # read the token from file -with open("token.txt", "r") as f: +with open("token.txt") as f: token = f.read() # authenticate to server through the client instance diff --git a/poetry.lock b/poetry.lock index d6127d8..355e55b 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,18 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. + +[[package]] +name = "astroid" +version = "3.0.2" +description = "An abstract syntax tree for Python with inference support." +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "astroid-3.0.2-py3-none-any.whl", hash = "sha256:d6e62862355f60e716164082d6b4b041d38e2a8cf1c7cd953ded5108bac8ff5c"}, + {file = "astroid-3.0.2.tar.gz", hash = "sha256:4a61cf0a59097c7bb52689b0fd63717cd2a8a14dc9f1eee97b82d814881c8c91"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.11\""} [[package]] name = "certifi" @@ -128,6 +142,17 @@ numpy = "*" constrained-solution-tracking = ["moarchiving"] plotting = ["matplotlib"] +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." 
+optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + [[package]] name = "contourpy" version = "1.1.1" @@ -199,6 +224,73 @@ mypy = ["contourpy[bokeh,docs]", "docutils-stubs", "mypy (==1.4.1)", "types-Pill test = ["Pillow", "contourpy[test-no-images]", "matplotlib"] test-no-images = ["pytest", "pytest-cov", "wurlitzer"] +[[package]] +name = "coverage" +version = "7.3.3" +description = "Code coverage measurement for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "coverage-7.3.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d874434e0cb7b90f7af2b6e3309b0733cde8ec1476eb47db148ed7deeb2a9494"}, + {file = "coverage-7.3.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ee6621dccce8af666b8c4651f9f43467bfbf409607c604b840b78f4ff3619aeb"}, + {file = "coverage-7.3.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1367aa411afb4431ab58fd7ee102adb2665894d047c490649e86219327183134"}, + {file = "coverage-7.3.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f0f8f0c497eb9c9f18f21de0750c8d8b4b9c7000b43996a094290b59d0e7523"}, + {file = "coverage-7.3.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db0338c4b0951d93d547e0ff8d8ea340fecf5885f5b00b23be5aa99549e14cfd"}, + {file = "coverage-7.3.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d31650d313bd90d027f4be7663dfa2241079edd780b56ac416b56eebe0a21aab"}, + {file = "coverage-7.3.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:9437a4074b43c177c92c96d051957592afd85ba00d3e92002c8ef45ee75df438"}, + {file = "coverage-7.3.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9e17d9cb06c13b4f2ef570355fa45797d10f19ca71395910b249e3f77942a837"}, + {file = "coverage-7.3.3-cp310-cp310-win32.whl", hash = "sha256:eee5e741b43ea1b49d98ab6e40f7e299e97715af2488d1c77a90de4a663a86e2"}, + {file = "coverage-7.3.3-cp310-cp310-win_amd64.whl", hash = "sha256:593efa42160c15c59ee9b66c5f27a453ed3968718e6e58431cdfb2d50d5ad284"}, + {file = "coverage-7.3.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8c944cf1775235c0857829c275c777a2c3e33032e544bcef614036f337ac37bb"}, + {file = "coverage-7.3.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:eda7f6e92358ac9e1717ce1f0377ed2b9320cea070906ece4e5c11d172a45a39"}, + {file = "coverage-7.3.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c854c1d2c7d3e47f7120b560d1a30c1ca221e207439608d27bc4d08fd4aeae8"}, + {file = "coverage-7.3.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:222b038f08a7ebed1e4e78ccf3c09a1ca4ac3da16de983e66520973443b546bc"}, + {file = "coverage-7.3.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff4800783d85bff132f2cc7d007426ec698cdce08c3062c8d501ad3f4ea3d16c"}, + {file = "coverage-7.3.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fc200cec654311ca2c3f5ab3ce2220521b3d4732f68e1b1e79bef8fcfc1f2b97"}, + {file = "coverage-7.3.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:307aecb65bb77cbfebf2eb6e12009e9034d050c6c69d8a5f3f737b329f4f15fb"}, + 
{file = "coverage-7.3.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ffb0eacbadb705c0a6969b0adf468f126b064f3362411df95f6d4f31c40d31c1"}, + {file = "coverage-7.3.3-cp311-cp311-win32.whl", hash = "sha256:79c32f875fd7c0ed8d642b221cf81feba98183d2ff14d1f37a1bbce6b0347d9f"}, + {file = "coverage-7.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:243576944f7c1a1205e5cd658533a50eba662c74f9be4c050d51c69bd4532936"}, + {file = "coverage-7.3.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a2ac4245f18057dfec3b0074c4eb366953bca6787f1ec397c004c78176a23d56"}, + {file = "coverage-7.3.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f9191be7af41f0b54324ded600e8ddbcabea23e1e8ba419d9a53b241dece821d"}, + {file = "coverage-7.3.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:31c0b1b8b5a4aebf8fcd227237fc4263aa7fa0ddcd4d288d42f50eff18b0bac4"}, + {file = "coverage-7.3.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee453085279df1bac0996bc97004771a4a052b1f1e23f6101213e3796ff3cb85"}, + {file = "coverage-7.3.3-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1191270b06ecd68b1d00897b2daddb98e1719f63750969614ceb3438228c088e"}, + {file = "coverage-7.3.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:007a7e49831cfe387473e92e9ff07377f6121120669ddc39674e7244350a6a29"}, + {file = "coverage-7.3.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:af75cf83c2d57717a8493ed2246d34b1f3398cb8a92b10fd7a1858cad8e78f59"}, + {file = "coverage-7.3.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:811ca7373da32f1ccee2927dc27dc523462fd30674a80102f86c6753d6681bc6"}, + {file = "coverage-7.3.3-cp312-cp312-win32.whl", hash = "sha256:733537a182b5d62184f2a72796eb6901299898231a8e4f84c858c68684b25a70"}, + {file = "coverage-7.3.3-cp312-cp312-win_amd64.whl", hash = "sha256:e995efb191f04b01ced307dbd7407ebf6e6dc209b528d75583277b10fd1800ee"}, + {file = "coverage-7.3.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:fbd8a5fe6c893de21a3c6835071ec116d79334fbdf641743332e442a3466f7ea"}, + {file = "coverage-7.3.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:50c472c1916540f8b2deef10cdc736cd2b3d1464d3945e4da0333862270dcb15"}, + {file = "coverage-7.3.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e9223a18f51d00d3ce239c39fc41410489ec7a248a84fab443fbb39c943616c"}, + {file = "coverage-7.3.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f501e36ac428c1b334c41e196ff6bd550c0353c7314716e80055b1f0a32ba394"}, + {file = "coverage-7.3.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:475de8213ed95a6b6283056d180b2442eee38d5948d735cd3d3b52b86dd65b92"}, + {file = "coverage-7.3.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:afdcc10c01d0db217fc0a64f58c7edd635b8f27787fea0a3054b856a6dff8717"}, + {file = "coverage-7.3.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:fff0b2f249ac642fd735f009b8363c2b46cf406d3caec00e4deeb79b5ff39b40"}, + {file = "coverage-7.3.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:a1f76cfc122c9e0f62dbe0460ec9cc7696fc9a0293931a33b8870f78cf83a327"}, + {file = "coverage-7.3.3-cp38-cp38-win32.whl", hash = "sha256:757453848c18d7ab5d5b5f1827293d580f156f1c2c8cef45bfc21f37d8681069"}, + {file = "coverage-7.3.3-cp38-cp38-win_amd64.whl", hash = 
"sha256:ad2453b852a1316c8a103c9c970db8fbc262f4f6b930aa6c606df9b2766eee06"}, + {file = "coverage-7.3.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3b15e03b8ee6a908db48eccf4e4e42397f146ab1e91c6324da44197a45cb9132"}, + {file = "coverage-7.3.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:89400aa1752e09f666cc48708eaa171eef0ebe3d5f74044b614729231763ae69"}, + {file = "coverage-7.3.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c59a3e59fb95e6d72e71dc915e6d7fa568863fad0a80b33bc7b82d6e9f844973"}, + {file = "coverage-7.3.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9ede881c7618f9cf93e2df0421ee127afdfd267d1b5d0c59bcea771cf160ea4a"}, + {file = "coverage-7.3.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3bfd2c2f0e5384276e12b14882bf2c7621f97c35320c3e7132c156ce18436a1"}, + {file = "coverage-7.3.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7f3bad1a9313401ff2964e411ab7d57fb700a2d5478b727e13f156c8f89774a0"}, + {file = "coverage-7.3.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:65d716b736f16e250435473c5ca01285d73c29f20097decdbb12571d5dfb2c94"}, + {file = "coverage-7.3.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a702e66483b1fe602717020a0e90506e759c84a71dbc1616dd55d29d86a9b91f"}, + {file = "coverage-7.3.3-cp39-cp39-win32.whl", hash = "sha256:7fbf3f5756e7955174a31fb579307d69ffca91ad163467ed123858ce0f3fd4aa"}, + {file = "coverage-7.3.3-cp39-cp39-win_amd64.whl", hash = "sha256:cad9afc1644b979211989ec3ff7d82110b2ed52995c2f7263e7841c846a75348"}, + {file = "coverage-7.3.3-pp38.pp39.pp310-none-any.whl", hash = "sha256:d299d379b676812e142fb57662a8d0d810b859421412b4d7af996154c00c31bb"}, + {file = "coverage-7.3.3.tar.gz", hash = "sha256:df04c64e58df96b4427db8d0559e95e2df3138c9916c96f9f6a4dd220db2fdb7"}, +] + +[package.dependencies] +tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} + +[package.extras] +toml = ["tomli"] + [[package]] name = "cycler" version = "0.12.1" @@ -214,6 +306,34 @@ files = [ docs = ["ipython", "matplotlib", "numpydoc", "sphinx"] tests = ["pytest", "pytest-cov", "pytest-xdist"] +[[package]] +name = "dill" +version = "0.3.7" +description = "serialize all of Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "dill-0.3.7-py3-none-any.whl", hash = "sha256:76b122c08ef4ce2eedcd4d1abd8e641114bfc6c2867f49f3c41facf65bf19f5e"}, + {file = "dill-0.3.7.tar.gz", hash = "sha256:cc1c8b182eb3013e24bd475ff2e9295af86c1a38eb1aff128dac8962a9ce3c03"}, +] + +[package.extras] +graph = ["objgraph (>=1.7.2)"] + +[[package]] +name = "exceptiongroup" +version = "1.2.0" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, + {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, +] + +[package.extras] +test = ["pytest (>=6)"] + [[package]] name = "fonttools" version = "4.44.0" @@ -308,6 +428,31 @@ zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", 
"pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-ruff", "zipp (>=3.17)"] +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "isort" +version = "5.13.2" +description = "A Python utility / library to sort Python imports." +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, + {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, +] + +[package.extras] +colors = ["colorama (>=0.4.6)"] + [[package]] name = "joblib" version = "1.3.2" @@ -501,6 +646,17 @@ pyparsing = ">=2.3.1" python-dateutil = ">=2.7" setuptools_scm = ">=7" +[[package]] +name = "mccabe" +version = "0.7.0" +description = "McCabe checker, plugin for flake8" +optional = false +python-versions = ">=3.6" +files = [ + {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, + {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, +] + [[package]] name = "mpmath" version = "1.3.0" @@ -633,6 +789,36 @@ files = [ docs = ["furo", "olefile", "sphinx (>=2.4)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-removed-in", "sphinxext-opengraph"] tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"] +[[package]] +name = "platformdirs" +version = "4.1.0" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "platformdirs-4.1.0-py3-none-any.whl", hash = "sha256:11c8f37bcca40db96d8144522d925583bdb7a31f7b0e37e3ed4318400a8e2380"}, + {file = "platformdirs-4.1.0.tar.gz", hash = "sha256:906d548203468492d432bcb294d4bc2fff751bf84971fbb2c10918cc206ee420"}, +] + +[package.extras] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"] + +[[package]] +name = "pluggy" +version = "1.3.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.3.0-py3-none-any.whl", hash = "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"}, + {file = "pluggy-1.3.0.tar.gz", hash = "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + [[package]] name = "psutil" version = "5.9.6" @@ -661,6 +847,35 @@ files = [ [package.extras] test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] +[[package]] +name = "pylint" +version = "3.0.3" +description = "python code static checker" +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "pylint-3.0.3-py3-none-any.whl", hash = "sha256:7a1585285aefc5165db81083c3e06363a27448f6b467b3b0f30dbd0ac1f73810"}, + {file = "pylint-3.0.3.tar.gz", hash = "sha256:58c2398b0301e049609a8429789ec6edf3aabe9b6c5fec916acd18639c16de8b"}, +] + +[package.dependencies] +astroid = ">=3.0.1,<=3.1.0-dev0" +colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} +dill = [ + {version = ">=0.2", markers = "python_version < \"3.11\""}, + {version = ">=0.3.6", markers = "python_version >= \"3.11\""}, +] +isort = ">=4.2.5,<5.13.0 || >5.13.0,<6" +mccabe = ">=0.6,<0.8" +platformdirs = ">=2.2.0" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +tomlkit = ">=0.10.1" +typing-extensions = {version = ">=3.10.0", markers = "python_version < \"3.10\""} + +[package.extras] +spelling = ["pyenchant (>=3.2,<4.0)"] +testutils = ["gitpython (>3)"] + [[package]] name = "pyparsing" version = "3.1.1" @@ -675,6 +890,46 @@ files = [ [package.extras] diagrams = ["jinja2", "railroad-diagrams"] +[[package]] +name = "pytest" +version = "7.4.3" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-7.4.3-py3-none-any.whl", hash = "sha256:0d009c083ea859a71b76adf7c1d502e4bc170b80a8ef002da5806527b9591fac"}, + {file = "pytest-7.4.3.tar.gz", hash = "sha256:d989d136982de4e3b29dabcc838ad581c64e8ed52c11fbe86ddebd9da0818cd5"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" +tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} + +[package.extras] +testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "pytest-cov" +version = "4.1.0" +description = "Pytest plugin for measuring coverage." 
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"},
+    {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"},
+]
+
+[package.dependencies]
+coverage = {version = ">=5.2.1", extras = ["toml"]}
+pytest = ">=4.6"
+
+[package.extras]
+testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"]
+
 [[package]]
 name = "python-dateutil"
 version = "2.8.2"
@@ -856,6 +1111,17 @@ files = [
     {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"},
 ]
 
+[[package]]
+name = "tomlkit"
+version = "0.12.3"
+description = "Style preserving TOML library"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "tomlkit-0.12.3-py3-none-any.whl", hash = "sha256:b0a645a9156dc7cb5d3a1f0d4bab66db287fcb8e0430bdd4664a095ea16414ba"},
+    {file = "tomlkit-0.12.3.tar.gz", hash = "sha256:75baf5012d06501f07bee5bf8e801b9f343e7aac5a92581f20f80ce632e6b5a4"},
+]
+
 [[package]]
 name = "typing-extensions"
 version = "4.8.0"
@@ -902,4 +1168,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p
 [metadata]
 lock-version = "2.0"
 python-versions = ">=3.8,<3.12"
-content-hash = "52b51c198504f144d7775cc4d2e91f24bc90b4be56adcd9538704458c76c8a9f"
+content-hash = "50db5074b7fa12d380bdb916c5bb9119f1acc11f71002786c5868e809b5da86a"
diff --git a/pyproject.toml b/pyproject.toml
index d49e528..e82212d 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -23,3 +23,25 @@ packages = [{ include = "qibo_tii_provider", from = "src" }]
 python = ">=3.8,<3.12"
 qibo = ">=0.2.2"
 requests = "^2.31.0"
+
+[tool.poetry.group.dev.dependencies]
+pytest = "^7.4.3"
+pytest-cov = "^4.1.0"
+pylint = "^3.0.3"
+
+[tool.poe.tasks]
+test = "pytest"
+lint = "pylint --errors-only src"
+lint-warnings = "pylint --exit-zero src"
+
+[tool.pylint.reports]
+output-format = "colorized"
+
+[tool.pytest.ini_options]
+testpaths = ['tests/']
+filterwarnings = ['ignore::RuntimeWarning']
+addopts = [
+    '--cov=src/qibo_tii_provider',
+    '--cov-report=xml',
+    '--cov-report=html',
+]
diff --git a/src/qibo_tii_provider/__init__.py b/src/qibo_tii_provider/__init__.py
index 664a1be..ab9e664 100644
--- a/src/qibo_tii_provider/__init__.py
+++ b/src/qibo_tii_provider/__init__.py
@@ -1 +1,2 @@
-from qibo_tii_provider.tiiprovider import TIIProvider
+"""The `qibo_tii_provider` package"""
+from qibo_tii_provider.tii_qrc_provider import TIIProvider
diff --git a/src/qibo_tii_provider/config.py b/src/qibo_tii_provider/config.py
new file mode 100644
index 0000000..cf938f3
--- /dev/null
+++ b/src/qibo_tii_provider/config.py
@@ -0,0 +1,23 @@
+"""This module implements some constants and custom exceptions"""
+
+
+class MalformedResponseError(Exception):
+    """Exception raised when the server response body does not contain the expected keys"""
+
+    def __init__(
+        self, message="Server response body does not contain all the expected keys"
+    ):
+        self.message = message
+        super().__init__(self.message)
+
+
+class JobPostServerError(Exception):
+    """Exception raised when the server fails to post the job to the queue.
+
+    The client should handle this error to acknowledge that the job submission
+    was not successful, without crashing.
+ """ + + def __init__(self, message="Server failed to post job to queue"): + self.message = message + super().__init__(self.message) diff --git a/src/qibo_tii_provider/tii_qrc_provider.py b/src/qibo_tii_provider/tii_qrc_provider.py new file mode 100644 index 0000000..aa73d0e --- /dev/null +++ b/src/qibo_tii_provider/tii_qrc_provider.py @@ -0,0 +1,257 @@ +"""The module implementing the TIIProvider class.""" +import logging +import os +import tarfile +import tempfile +import time +from pathlib import Path +from typing import Iterable, List, Optional + +import numpy as np +import qibo +import requests + +from .config import JobPostServerError, MalformedResponseError + +QRCCLUSTER_IP = os.environ.get("QRCCLUSTER_IP", "login.qrccluster.com") +QRCCLUSTER_PORT = os.environ.get("QRCCLUSTER_PORT", "8010") +RESULTS_BASE_FOLDER = os.environ.get("RESULTS_BASE_FOLDER", "/tmp/qibo_tii_provider") +SECONDS_BETWEEN_CHECKS = os.environ.get("SECONDS_BETWEEN_CHECKS", 2) + +BASE_URL = f"http://{QRCCLUSTER_IP}:{QRCCLUSTER_PORT}/" + +RESULTS_BASE_FOLDER = Path(RESULTS_BASE_FOLDER) +RESULTS_BASE_FOLDER.mkdir(exist_ok=True) + +TIMEOUT = 10 + + +# configure logger +logging.basicConfig(format="[%(asctime)s] %(levelname)s: %(message)s") +logger = logging.getLogger(__name__) +LOGGING_LEVEL = logging.INFO +logger.setLevel(LOGGING_LEVEL) + + +def wait_for_response_to_get_request(url: str) -> requests.models.Response: + """Wait until the server completes the computation and return the response. + + :param url: the endpoint to make the request + :type url: str + + :return: the response of the get request + :rtype: requests.models.Response + """ + while True: + response = requests.get(url, timeout=TIMEOUT) + if response.content == b"Job still in progress": + time.sleep(SECONDS_BETWEEN_CHECKS) + continue + return response + + +def _write_stream_to_tmp_file(stream: Iterable) -> Path: + """Write chunk of bytes to temporary file. + + The tmp_path should be closed manually. + + :param stream: the stream of bytes chunks to be saved on disk + :type stream: Iterable + + :return: the name of the tempo + + """ + with tempfile.NamedTemporaryFile(delete=False) as tmp_file: + for chunk in stream: + if chunk: + tmp_file.write(chunk) + archive_path = tmp_file.name + return Path(archive_path) + + +def _extract_archive_to_folder(source_archive: Path, destination_folder: Path): + with tarfile.open(source_archive, "r:gz") as archive: + archive.extractall(destination_folder) + + +def _save_and_unpack_stream_response_to_folder(stream: Iterable, results_folder: Path): + """Save the stream to a given folder. + + Internally, save the stream to a temporary archive and extract its contents + to the target folder. + + :param stream: the iterator containing the response content + :type stream: Iterable + :param results_folder: the local path to the results folder + :type results_folder: Path + """ + archive_path = _write_stream_to_tmp_file(stream) + + _extract_archive_to_folder(archive_path, results_folder) + + # clean up temporary file + archive_path.unlink() + + +def check_response_has_keys(response: requests.models.Response, keys: List[str]): + """Check that the response body contains certain keys. 
+
+    :raises MalformedResponseError: if the server response does not contain all
+        the expected keys
+    """
+    response_keys = set(response.json().keys())
+    expected_keys = set(keys)
+    missing_keys = expected_keys.difference(response_keys)
+
+    if len(missing_keys):
+        raise MalformedResponseError(
+            f"The server response is missing the following keys: {' '.join(missing_keys)}"
+        )
+
+
+class TIIProvider:
+    """Class to manage the interaction with the QRC cluster."""
+
+    def __init__(self, token: str):
+        """
+        :param token: the authentication token associated with the webapp user
+        :type token: str
+        """
+        self.token = token
+
+        self.pid = None
+        self.results_folder = None
+        self.results_path = None
+
+        self.check_client_server_qibo_versions()
+
+    def check_client_server_qibo_versions(self):
+        """Check that client and server qibo package installed versions match.
+
+        Raise assertion error if the two versions are not the same.
+        """
+        url = BASE_URL + "qibo_version/"
+        response = requests.get(url, timeout=TIMEOUT)
+        response.raise_for_status()
+        check_response_has_keys(response, ["qibo_version"])
+        qibo_server_version = response.json()["qibo_version"]
+        qibo_local_version = qibo.__version__
+
+        msg = (
+            "Local Qibo package version does not match the server one, please "
+            f"upgrade: {qibo_local_version} -> {qibo_server_version}"
+        )
+        assert qibo_local_version == qibo_server_version, msg
+
+    def run_circuit(
+        self, circuit: qibo.Circuit, nshots: int = 1000, device: str = "sim"
+    ) -> Optional[np.ndarray]:
+        """Run circuit on the cluster.
+
+        List of available devices:
+
+        - sim
+        - iqm5q
+        - spinq10q
+        - tii1q_b1
+        - qw25q_gold
+        - tiidc
+        - tii2q
+        - tii2q1
+        - tii2q2
+        - tii2q3
+        - tii2q4
+
+        :param circuit: the circuit to run
+        :type circuit: qibo.Circuit
+        :param nshots: the number of measurement shots
+        :type nshots: int
+        :param device: the device to run the circuit on. Default device is `sim`
+        :type device: str
+
+        :return: the numpy array with the results of the computation. None if
+            the job raised an error.
+        :rtype: Optional[np.ndarray]
+        """
+        # post circuit to server
+        logger.info("Post new circuit on the server")
+
+        try:
+            self._post_circuit(circuit, nshots, device)
+        except JobPostServerError as err:
+            logger.error(err.message)
+            return None
+
+        # retrieve results
+        logger.info("Job posted on server with pid %s", self.pid)
+        logger.info("Check results every %d seconds ...", SECONDS_BETWEEN_CHECKS)
+        result = self._get_result()
+
+        return result
+
+    def _post_circuit(
+        self, circuit: qibo.Circuit, nshots: int = 100, device: str = "sim"
+    ):
+        # HTTP request
+        url = BASE_URL + "run_circuit/"
+        payload = {
+            "token": self.token,
+            "circuit": circuit.raw,
+            "nshots": nshots,
+            "device": device,
+        }
+        response = requests.post(url, json=payload, timeout=TIMEOUT)
+
+        # checks
+        response.raise_for_status()
+        check_response_has_keys(response, ["pid", "message"])
+
+        # save the response
+        response_content = response.json()
+        self.pid = response_content["pid"]
+
+        if self.pid is None:
+            raise JobPostServerError(response_content["message"])
+
+    def _get_result(self) -> Optional[np.ndarray]:
+        """Send requests to server checking whether the job is completed.
+
+        This function populates the `TIIProvider.results_folder` and
+        `TIIProvider.results_path` attributes.
+
+        :return: the numpy array with the results of the computation. None if
+            the job raised an error.
+        :rtype: Optional[np.ndarray]
+        """
+        url = BASE_URL + f"get_result/{self.pid}/"
+        response = wait_for_response_to_get_request(url)
+
+        # create the job results folder
+        self.results_folder = RESULTS_BASE_FOLDER / self.pid
+        self.results_folder.mkdir(exist_ok=True)
+
+        # Save the stream to disk
+        try:
+            _save_and_unpack_stream_response_to_folder(
+                response.iter_content(), self.results_folder
+            )
+        except tarfile.ReadError as err:
+            logger.error("Caught tarfile ReadError: %s", err)
+            logger.error(
+                "The received file is not a valid gzip "
+                "archive, the result might have to be inspected manually. Find "
+                "the file at `%s`",
+                self.results_folder.as_posix(),
+            )
+            return None
+
+        if response.headers["Job-Status"].lower() == "error":
+            logger.info(
+                "Job exited with error, check logs in %s folder",
+                self.results_folder.as_posix(),
+            )
+            return None
+
+        self.results_path = self.results_folder / "results.npy"
+        return qibo.result.load_result(self.results_path)
diff --git a/src/qibo_tii_provider/tiiprovider.py b/src/qibo_tii_provider/tiiprovider.py
deleted file mode 100644
index db2230d..0000000
--- a/src/qibo_tii_provider/tiiprovider.py
+++ /dev/null
@@ -1,184 +0,0 @@
-import json
-from pathlib import Path
-import tarfile
-import tempfile
-import time
-from typing import Iterable, Optional
-import os
-
-import numpy as np
-import qibo
-import requests
-
-
-QRCCLUSTER_IP=os.environ.get("QRCCLUSTER_IP", "login.qrccluster.com")
-QRCCLUSTER_PORT=os.environ.get("QRCCLUSTER_PORT", "8010")
-RESULTS_BASE_FOLDER=os.environ.get("RESULTS_BASE_FOLDER", "/tmp/qibo_tii_provider")
-SECONDS_BETWEEN_CHECKS=os.environ.get("SECONDS_BETWEEN_CHECKS", 2)
-
-BASE_URL = f"http://{QRCCLUSTER_IP}:{QRCCLUSTER_PORT}/"
-
-RESULTS_BASE_FOLDER = Path(RESULTS_BASE_FOLDER)
-RESULTS_BASE_FOLDER.mkdir(exist_ok=True)
-
-SECONDS_BETWEEN_CHECKS = SECONDS_BETWEEN_CHECKS
-
-
-def _write_stream_response_to_folder(stream: Iterable, results_folder: Path):
-    """Save the stream to a given folder.
-
-    Internally, save the stream to a temporary archive and extract its contents
-    to the target folder.
-
-    :param stream: the iterator containing the response content
-    :type stream: Iterable
-    :param results_folder: the local path to the results folder
-    :type results_folder: Path
-    """
-    # save archive to tempfile
-    with tempfile.NamedTemporaryFile(delete=False) as archive:
-        for chunk in stream:
-            if chunk:
-                archive.write(chunk)
-        archive_path = archive.name
-
-    # extract archive content to target directory
-    with tarfile.open(archive_path, "r") as archive:
-        archive.extractall(results_folder)
-
-    os.remove(archive_path)
-
-
-class TIIProvider:
-    """Class to manage the interaction with the QRC cluster."""
-
-    def __init__(self, token: str):
-        """
-        :param token: the authentication token associated to the webapp user
-        :type: str
-        """
-        self.token = token
-
-        self.check_client_server_qibo_versions()
-
-    def check_client_server_qibo_versions(self):
-        """Check that client and server qibo package installed versions match.
-
-        Raise assertion error if the two versions are not the same.
- """ - url = BASE_URL + "qibo_version/" - response = requests.get(url) - assert ( - response.status_code == 200 - ), f"Failed to send the request to the server, response {response.status_code}" - qibo_server_version = json.loads(response.content)["qibo_version"] - qibo_local_version = qibo.__version__ - - assert ( - qibo_local_version == qibo_server_version - ), f"Local Qibo package version does not match the server one, please upgrade: {qibo_local_version} -> {qibo_server_version}" - - def run_circuit( - self, circuit: qibo.Circuit, nshots: int = 1000, device: str = "sim" - ) -> Optional[np.ndarray]: - """Run circuit on the cluster. - - List of available devices: - - - sim - - iqm5q - - spinq10q - - tii1q_b1 - - qw25q_gold - - tiidc - - tii2q - - tii2q1 - - tii2q2 - - tii2q3 - - tii2q4 - - :param circuit: the QASM representation of the circuit to run - :type circuit: Circuit - :param nshots: - :type nshots: int - :param device: the device to run the circuit on. Default device is `sim` - :type device: str - - :return: the numpy array with the results of the computation. None if - the job raised an error. - :rtype: np.ndarray - """ - # post circuit to server - print("Post new circuit on the server") - self.__post_circuit(circuit, nshots, device) - - # retrieve results - print(f"Job posted on server with pid {self.pid}") - print(f"Check results every {SECONDS_BETWEEN_CHECKS} seconds ...") - result = self.__get_result() - - return result - - def __post_circuit( - self, circuit: qibo.Circuit, nshots: int = 100, device: str = "sim" - ): - payload = { - "token": self.token, - "circuit": circuit.raw, - "nshots": nshots, - "device": device, - } - url = BASE_URL + "run_circuit/" - - # post circuit - try: - # Send an HTTP request to the server - response = requests.post(url, json=payload) - - # the response should contain the PID to be checked (in the db, store - # an hashed version of the pid, not the actual value) - - # Check the response - if response.status_code == 200: - response_content = json.loads(response.content) - self.pid = response_content["pid"] - return response_content["message"] - else: - return "Error. Failed to send the request to the server" - - except Exception as e: - return f"Error. An error occurred: {str(e)}" - - def __get_result(self) -> Optional[np.ndarray]: - """Send requests to server checking whether the job is completed. - - This function populates the `TIIProvider.result_folder` and - `TIIProvider.result_path` attributes. - - :return: the numpy array with the results of the computation. None if - the job raised an error. 
-        :rtype: Optional[np.ndarray]
-        """
-        url = BASE_URL + f"get_result/{self.pid}"
-        while True:
-            time.sleep(SECONDS_BETWEEN_CHECKS)
-            response = requests.get(url)
-
-            if response.content == b"Job still in progress":
-                continue
-
-            # create the job results folder
-            self.result_folder = RESULTS_BASE_FOLDER / self.pid
-            self.result_folder.mkdir(exist_ok=True)
-
-            # Save the stream to disk
-            _write_stream_response_to_folder(
-                response.iter_content(), self.result_folder
-            )
-
-            if response.headers["Job-Status"].lower() == "error":
-                print(f"Job exited with error, check logs in {self.result_folder.as_posix()} folder")
-                return None
-
-            self.result_path = self.result_folder / "results.npy"
-            return qibo.result.load_result(self.result_path)
diff --git a/tests/test_tii_qrc_provider.py b/tests/test_tii_qrc_provider.py
new file mode 100644
index 0000000..b7657be
--- /dev/null
+++ b/tests/test_tii_qrc_provider.py
@@ -0,0 +1,339 @@
+import tarfile
+from pathlib import Path
+from typing import Callable
+from unittest.mock import Mock, patch
+
+import pytest
+import utils_test_tii_qrc_provider as utils
+from requests.exceptions import HTTPError
+
+from qibo_tii_provider import tii_qrc_provider
+from qibo_tii_provider.config import JobPostServerError, MalformedResponseError
+
+PKG = "qibo_tii_provider.tii_qrc_provider"
+LOCAL_URL = "http://localhost:8000/"
+FAKE_QIBO_VERSION = "0.0.1"
+FAKE_PID = "123"
+ARCHIVE_NAME = "file.tar.gz"
+TIMEOUT = 1
+
+
+@pytest.fixture(autouse=True)
+def mock_qrccluster_ip():
+    """Ensure that all the requests are made on localhost"""
+    with patch(f"{PKG}.BASE_URL", LOCAL_URL) as _fixture:
+        yield _fixture
+
+
+@pytest.fixture(autouse=True)
+def mock_qibo():
+    """Mock the qibo package, faking its version and its result loading"""
+    with patch(f"{PKG}.qibo") as _mock_qibo:
+        _mock_qibo.__version__ = FAKE_QIBO_VERSION
+        _mock_qibo.result.load_result.side_effect = lambda x: x
+        yield _mock_qibo
+
+
+@pytest.fixture(scope="module", autouse=True)
+def mock_timeout():
+    """Lower the requests timeout to speed up the tests"""
+    with patch(f"{PKG}.TIMEOUT", TIMEOUT) as _fixture:
+        yield _fixture
+
+
+@pytest.fixture
+def results_base_folder(tmp_path: Path):
+    results_base_folder = tmp_path / "results"
+    results_base_folder.mkdir()
+    with patch(f"{PKG}.RESULTS_BASE_FOLDER", results_base_folder):
+        yield results_base_folder
+
+
+def _get_request_side_effect(job_status: str = "success") -> Callable:
+    """Return a callable mock for the get request function
+
+    Job status parameter controls the response header of `get_result/{pid}`
+    endpoint.
+
+    :param job_status: the Job-Status header of the mocked response
+    :type job_status: str
+
+    :return: the get request side effect function
+    :rtype: Callable
+    """
+
+    def _request_side_effect(url, timeout):
+        if url == LOCAL_URL + "qibo_version/":
+            return utils.MockedResponse(
+                status_code=200,
+                json_data={"qibo_version": FAKE_QIBO_VERSION},
+            )
+        if url == LOCAL_URL + f"get_result/{FAKE_PID}/":
+            stream, _, _ = utils.get_in_memory_fake_archive_stream()
+            json_data = {
+                "content": None,
+                "iter_content": stream,
+                "headers": {"Job-Status": job_status},
+            }
+            return utils.MockedResponse(status_code=200, json_data=json_data)
+
+    return _request_side_effect
+
+
+def _post_request_side_effect(url, json, timeout):
+    if url == LOCAL_URL + "run_circuit/":
+        json_data = {"pid": FAKE_PID, "message": "Success. Job posted"}
Job posted"} + return utils.MockedResponse(status_code=200, json_data=json_data) + + +@pytest.fixture +def mock_request(): + with patch(f"{PKG}.requests") as _mock_request: + _mock_request.get.side_effect = _get_request_side_effect() + _mock_request.post.side_effect = _post_request_side_effect + yield _mock_request + + +@pytest.fixture +def archive_path(tmp_path): + return tmp_path / ARCHIVE_NAME + + +@pytest.fixture +def mock_tempfile(archive_path): + with patch(f"{PKG}.tempfile") as _mock_tempfile: + _mock_tempfile.NamedTemporaryFile = utils.get_fake_tmp_file_class(archive_path) + yield _mock_tempfile + + +def test_check_response_has_keys(): + """Check response body contains the keys""" + keys = ["key1", "key2"] + json_data = {"key1": 0, "key2": 1} + status_code = 200 + mock_response = utils.MockedResponse(status_code, json_data) + tii_qrc_provider.check_response_has_keys(mock_response, keys) + + +def test_check_response_has_missing_keys(): + """Check response body contains the keys""" + keys = ["key1", "key2"] + json_data = {"key1": 0} + status_code = 200 + mock_response = utils.MockedResponse(status_code, json_data) + with pytest.raises(MalformedResponseError): + tii_qrc_provider.check_response_has_keys(mock_response, keys) + + +def _get_tii_client(): + return tii_qrc_provider.TIIProvider("valid_token") + + +def test_check_client_server_qibo_versions_with_version_match(mock_request: Mock): + _get_tii_client() + mock_request.get.assert_called_once_with( + LOCAL_URL + "qibo_version/", timeout=TIMEOUT + ) + + +def test_check_client_server_qibo_versions_with_version_mismatch(mock_request: Mock): + remote_qibo_version = "0.2.2" + + def _new_side_effect(url, timeout): + return utils.MockedResponse( + status_code=200, json_data={"qibo_version": remote_qibo_version} + ) + + mock_request.get.side_effect = _new_side_effect + + with pytest.raises(AssertionError): + _get_tii_client() + + mock_request.get.assert_called_once_with( + LOCAL_URL + "qibo_version/", timeout=TIMEOUT + ) + + +def test__post_circuit_with_invalid_token(mock_request: Mock): + def _new_side_effect(url, json, timeout): + return utils.MockedResponse(status_code=404) + + mock_request.post.side_effect = _new_side_effect + + client = _get_tii_client() + with pytest.raises(HTTPError): + client._post_circuit(utils.MockedCircuit()) + + +def test__post_circuit_not_successful(mock_request: Mock): + def _new_side_effect(url, json, timeout): + json_data = {"pid": None, "message": "post job to queue failed"} + return utils.MockedResponse(status_code=200, json_data=json_data) + + mock_request.post.side_effect = _new_side_effect + + client = _get_tii_client() + with pytest.raises(JobPostServerError): + client._post_circuit(utils.MockedCircuit()) + + +def test__run_circuit_with_unsuccessful_post_to_queue(mock_request: Mock): + def _new_side_effect(url, json, timeout): + json_data = {"pid": None, "message": "post job to queue failed"} + return utils.MockedResponse(status_code=200, json_data=json_data) + + mock_request.post.side_effect = _new_side_effect + + client = _get_tii_client() + return_value = client.run_circuit(utils.MockedCircuit()) + + assert return_value is None + + +def test_wait_for_response_to_get_request(mock_request: Mock): + failed_attempts = 3 + url = "http://example.url" + + keep_waiting = utils.MockedResponse( + status_code=200, json_data={"content": b"Job still in progress"} + ) + job_done = utils.MockedResponse(status_code=200) + + mock_request.get.side_effect = [keep_waiting] * failed_attempts + [job_done] + + with 
patch(f"{PKG}.SECONDS_BETWEEN_CHECKS", 1e-4): + tii_qrc_provider.wait_for_response_to_get_request(url) + + assert mock_request.get.call_count == failed_attempts + 1 + + +def test__write_stream_to_tmp_file_with_simple_text_stream( + mock_tempfile: Mock, archive_path: Path +): + """ + The test contains the following checks: + + - a new temporary file is created to a specific direction + - the content of the temporary file contains equals the one given + """ + stream = [b"line1\n", b"line2\n"] + + assert not archive_path.is_file() + + result_path = tii_qrc_provider._write_stream_to_tmp_file(stream) + + assert result_path == archive_path + assert result_path.is_file() + assert result_path.read_bytes() == b"".join(stream) + + +def test__write_stream_to_tmp_file(mock_tempfile: Mock, archive_path: Path): + """ + The test contains the following checks: + + - a new temporary file is created to a specific direction + - the content of the temporary file contains equals the one given + """ + stream, members, members_contents = utils.get_in_memory_fake_archive_stream() + + assert not archive_path.is_file() + + result_path = tii_qrc_provider._write_stream_to_tmp_file(stream) + + assert result_path == archive_path + assert result_path.is_file() + + # load the archive in memory and check that the members and the contents + # match with the expected ones + with tarfile.open(result_path, "r:gz") as archive: + result_members = sorted(archive.getnames()) + assert result_members == members + for member, member_content in zip(members, members_contents): + with archive.extractfile(member) as result_member: + result_content = result_member.read() + assert result_content == member_content + + +def test__extract_archive_to_folder_with_non_archive_input(tmp_path): + file_path = tmp_path / "file.txt" + file_path.write_text("test content") + + with pytest.raises(tarfile.ReadError): + tii_qrc_provider._extract_archive_to_folder(file_path, tmp_path) + + +@patch( + f"{PKG}._save_and_unpack_stream_response_to_folder", utils.raise_tarfile_readerror +) +def test__get_result_handles_tarfile_readerror(mock_request, results_base_folder): + file_path = results_base_folder / "file.txt" + file_path.write_text("test content") + + client = _get_tii_client() + result = client.run_circuit(utils.MockedCircuit()) + + assert result is None + + +def test__extract_archive_to_folder(archive_path, results_base_folder): + members, members_contents = utils.create_fake_archive(archive_path) + + tii_qrc_provider._extract_archive_to_folder(archive_path, results_base_folder) + + result_members = [] + result_members_contents = [] + for member_path in sorted(results_base_folder.iterdir()): + result_members.append(member_path.name) + result_members_contents.append(member_path.read_bytes()) + + assert result_members == members + assert result_members_contents == members_contents + + +def test__save_and_unpack_stream_response_to_folder( + mock_tempfile: Mock, archive_path: Path, results_base_folder: Path +): + stream, _, _ = utils.get_in_memory_fake_archive_stream() + + assert not archive_path.is_file() + + tii_qrc_provider._save_and_unpack_stream_response_to_folder( + stream, results_base_folder + ) + + # the archive should have been removed + assert not archive_path.is_file() + + +def test__get_result(mock_qibo, mock_request, mock_tempfile, results_base_folder): + expected_array_path = results_base_folder / FAKE_PID / "results.npy" + + client = _get_tii_client() + client.pid = FAKE_PID + result = client._get_result() + + 
+    mock_qibo.result.load_result.assert_called_once_with(expected_array_path)
+    assert result == expected_array_path
+
+
+def test__get_result_with_job_status_error(
+    mock_qibo, mock_request, mock_tempfile, results_base_folder
+):
+    mock_request.get.side_effect = _get_request_side_effect(job_status="error")
+
+    client = _get_tii_client()
+    client.pid = FAKE_PID
+    result = client._get_result()
+
+    mock_qibo.result.load_result.assert_not_called()
+    assert result is None
+
+
+def test__run_circuit(mock_qibo, mock_request, mock_tempfile, results_base_folder):
+    expected_array_path = results_base_folder / FAKE_PID / "results.npy"
+
+    client = _get_tii_client()
+    client.pid = FAKE_PID
+    result = client.run_circuit(utils.MockedCircuit())
+
+    assert result == expected_array_path
diff --git a/tests/utils_test_tii_qrc_provider.py b/tests/utils_test_tii_qrc_provider.py
new file mode 100644
index 0000000..249bb62
--- /dev/null
+++ b/tests/utils_test_tii_qrc_provider.py
@@ -0,0 +1,137 @@
+import io
+import tarfile
+from pathlib import Path
+from typing import Dict, Generator, List, Optional, Tuple
+
+from requests.exceptions import HTTPError
+
+
+class MockedResponse:
+    """A fake representation of a `requests.response` object"""
+
+    def __init__(self, status_code: int, json_data: Optional[Dict] = None):
+        self.status_code = status_code
+        self._json = json_data or {}
+        self.headers = self._json.get("headers")
+        self.content = self._json.get("content")
+        self._iter_content = self._json.get("iter_content")
+
+    def json(self) -> Dict:
+        return self._json
+
+    def iter_content(self):
+        return self._iter_content
+
+    def raise_for_status(self):
+        if 400 <= self.status_code < 500:
+            http_error_msg = f"{self.status_code} Client Error"
+
+        elif 500 <= self.status_code < 600:
+            http_error_msg = f"{self.status_code} Server Error"
+        else:
+            http_error_msg = ""
+
+        if http_error_msg:
+            raise HTTPError(http_error_msg)
+
+
+class MockedCircuit:
+    """A fake representation of a Qibo quantum circuit"""
+
+    def __init__(self):
+        self.raw = "raw circuit representation"
+
+
+class FakeStreamingHttpResponse:
+    """A fake representation of Django StreamingHttpResponse"""
+
+    def __init__(self, tar_gz_bytes):
+        self.tar_gz_bytes = tar_gz_bytes
+
+    def __iter__(self):
+        # Create a tarfile object from the bytes stream
+        tar_stream = io.BytesIO(self.tar_gz_bytes)
+        with tarfile.open(fileobj=tar_stream, mode="r:gz") as tar:
+            for tar_info in tar:
+                # Yield each byte of the file's content
+                with tar.extractfile(tar_info) as file:
+                    while byte := file.read(1):
+                        yield byte
+
+
+def _generic_create_archive_(get_file_context_manager_fn):
+    members = ["member1.txt", "member2.txt"]
+    members_contents = [
+        b"This is the content of member1.txt.",
+        b"This is the content of member2.txt.",
+    ]
+
+    with get_file_context_manager_fn() as tar:
+        for member, contents in zip(members, members_contents):
+            member_info = tarfile.TarInfo(member)
+            member_info.size = len(contents)
+            tar.addfile(member_info, io.BytesIO(contents))
+
+    return members, members_contents
+
+
+def create_fake_archive(archive_path: Path) -> Tuple[List[str], List[bytes]]:
+    """Create a .tar.gz archive with fake members and contents.
+
+    :param archive_path: the destination path for the archive
+    :type archive_path: Path
+
+    :return: the list with the archive file members and the list with the
+        contents of each archive file member
+    :rtype: Tuple[List[str], List[bytes]]
+    """
+    members, members_contents = _generic_create_archive_(
+        lambda: tarfile.open(archive_path, "w:gz")
+    )
+    return members, members_contents
+
+
+def create_in_memory_fake_archive() -> Tuple[bytes, List[str], List[bytes]]:
+    with io.BytesIO() as buffer:
+        members, members_contents = _generic_create_archive_(
+            lambda: tarfile.open(fileobj=buffer, mode="w:gz")
+        )
+        archive_as_bytes = buffer.getvalue()
+    return archive_as_bytes, members, members_contents
+
+
+class DataStreamer:
+    def __init__(self, data: bytes, chunk_size: int = 128):
+        self.data = data
+        self.chunk_size = chunk_size
+        self.size = len(data)
+
+    def __iter__(self) -> Generator[bytes, None, None]:
+        for i in range(0, len(self.data), self.chunk_size):
+            yield self.data[i : i + self.chunk_size]
+
+
+def get_in_memory_fake_archive_stream():
+    archive_as_bytes, members, members_contents = create_in_memory_fake_archive()
+    return DataStreamer(archive_as_bytes), members, members_contents
+
+
+def get_fake_tmp_file_class(file_path: Path):
+    class TmpFile:
+        def __init__(self, *args, **kwargs):
+            pass
+
+        def __enter__(self):
+            self.opened_file = open(file_path, "wb")
+            return self.opened_file
+
+        def __exit__(self, exc_type, exc_value, exc_tb):
+            self.opened_file.close()
+
+    return TmpFile
+
+
+def raise_tarfile_readerror(*args):
+    raise tarfile.ReadError()
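For reviewers, a minimal end-to-end sketch of the renamed client, mirroring the README snippet and the `examples/` scripts (the `token.txt` file and the `sim` device are taken from those examples; any other device name would be an assumption):

```python
import qibo

from qibo_tii_provider import TIIProvider

# create the circuit to run (a 5-qubit QFT, as in examples/run_successful_job.py)
circuit = qibo.models.QFT(5)

# read the user token from file
with open("token.txt") as f:
    token = f.read()

# authenticate to the server; the constructor also checks that the local
# qibo version matches the one installed on the cluster
client = TIIProvider(token)

# run the circuit on the default simulator device; run_circuit returns None
# if the job could not be posted or if the job exited with an error
result = client.run_circuit(circuit, nshots=1000, device="sim")
print(result)
```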