Commit

get master (#11823)
* [HDInsight] Fix hdi test failure (#11806)

* Initial generation Synapse autorest v5

* Fix empty model generation

* Fix test failures:
Skip 3 test cases: test_create_with_adlsgen1, test_create_with_additional_storage, test_oms_on_running_cluster
Rename test_http_extend to test_gateway_setting, since HTTP settings are
replaced with gateway settings

Co-authored-by: Laurent Mazuel <laurent.mazuel@gmail.com>
Co-authored-by: Zhenyu Zhou <zhezhou@microsoft.com>

* disable some by design bandit warnings (#11495)

* disable some by design bandit warnings

* Packaging update of azure-mgmt-datalake-analytics

Co-authored-by: Azure SDK Bot <aspysdk2@microsoft.com>

* Increment package version after release of azure_core (#11795)

* Use subject claim as home_account_id when no client_info (#11639)

* Refactor ClientCertificateCredential to use AadClient (#11719)

* Refactor ClientSecretCredential to use AadClient (#11718)

* [Cosmos] Fixed incorrect ID type error (#11798)

* Fixed incorrect ID type error

* Fix pylint

* [text analytics] Update readme (#11796)

* try something (#11797)

* Search refactoring 3 (#11804)

* create_or_update takes object

* rename is_hidden to hidden

* fix types

* updates

Co-authored-by: aim-for-better <zhenyu.zhou@microsoft.com>
Co-authored-by: Laurent Mazuel <laurent.mazuel@gmail.com>
Co-authored-by: Zhenyu Zhou <zhezhou@microsoft.com>
Co-authored-by: Azure SDK Bot <aspysdk2@microsoft.com>
Co-authored-by: Azure SDK Bot <53356347+azure-sdk@users.noreply.github.com>
Co-authored-by: Charles Lowell <chlowe@microsoft.com>
Co-authored-by: annatisch <antisch@microsoft.com>
Co-authored-by: iscai-msft <43154838+iscai-msft@users.noreply.github.com>
Co-authored-by: Krista Pratico <krpratic@microsoft.com>
10 people authored Jun 4, 2020
1 parent 902db93 commit 119e4e8
Showing 67 changed files with 4,717 additions and 3,885 deletions.
3 changes: 3 additions & 0 deletions sdk/core/azure-core/CHANGELOG.md
@@ -1,6 +1,9 @@

# Release History

## 1.6.1 (Unreleased)


## 1.6.0 (2020-06-03)

### Bug fixes
2 changes: 1 addition & 1 deletion sdk/core/azure-core/azure/core/_version.py
@@ -9,4 +9,4 @@
# regenerated.
# --------------------------------------------------------------------------

VERSION = "1.6.0"
VERSION = "1.6.1"
5 changes: 5 additions & 0 deletions sdk/cosmos/azure-cosmos/CHANGELOG.md
@@ -1,3 +1,8 @@
## 4.0.1 (Unreleased)

- Fixed the error raised when a non-string ID is used in an item. It now raises TypeError rather than AttributeError. Issue 11793 - thank you @Rabbit994.


## 4.0.0 (2020-05-20)

- Stable release.
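
The user-visible effect of the 4.0.1 fix above, as a minimal sketch (assumes azure-cosmos >= 4.0.1; the account URL, key, database, container, and partition-key field are placeholders):

```python
# Minimal sketch of the new behavior; <account> and <key> are placeholders.
from azure.cosmos import CosmosClient

client = CosmosClient(url="https://<account>.documents.azure.com:443/", credential="<key>")
container = client.get_database_client("mydb").get_container_client("mycontainer")

try:
    # A numeric id previously surfaced as AttributeError (int has no .find);
    # with this fix it raises TypeError with a clear message.
    container.upsert_item(body={"id": 7, "pk": "value"})
except TypeError as err:
    print(err)  # Id type must be a string.
```
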
@@ -31,6 +31,7 @@
from urllib3.util.retry import Retry
from azure.core.paging import ItemPaged # type: ignore
from azure.core import PipelineClient # type: ignore
from azure.core.exceptions import raise_with_traceback # type: ignore
from azure.core.pipeline.policies import ( # type: ignore
HTTPPolicy,
ContentDecodePolicy,
@@ -2480,11 +2481,14 @@ def __CheckAndUnifyQueryFormat(self, query_body):
def __ValidateResource(resource):
id_ = resource.get("id")
if id_:
if id_.find("/") != -1 or id_.find("\\") != -1 or id_.find("?") != -1 or id_.find("#") != -1:
raise ValueError("Id contains illegal chars.")

if id_[-1] == " ":
raise ValueError("Id ends with a space.")
try:
if id_.find("/") != -1 or id_.find("\\") != -1 or id_.find("?") != -1 or id_.find("#") != -1:
raise ValueError("Id contains illegal chars.")

if id_[-1] == " ":
raise ValueError("Id ends with a space.")
except AttributeError:
raise_with_traceback(TypeError, message="Id type must be a string.")

# Adds the partition key to options
def _AddPartitionKey(self, collection_link, document, options):
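
For reference, `raise_with_traceback` (imported above from azure.core.exceptions) raises a new exception type while preserving the traceback of the exception currently being handled. A standalone sketch of the same pattern, with `parse_id` as a hypothetical helper:

```python
# Standalone sketch of the raise_with_traceback pattern; parse_id is hypothetical.
from azure.core.exceptions import raise_with_traceback

def parse_id(resource):
    try:
        # A non-string id has no .strip(), so this raises AttributeError ...
        return resource["id"].strip()
    except AttributeError:
        # ... which is re-raised as a clearer TypeError, traceback preserved.
        raise_with_traceback(TypeError, message="Id type must be a string.")

parse_id({"id": 42})  # -> TypeError: Id type must be a string.
```
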
2 changes: 1 addition & 1 deletion sdk/cosmos/azure-cosmos/azure/cosmos/_version.py
@@ -19,4 +19,4 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.

VERSION = "4.0.0"
VERSION = "4.0.1"
@@ -14,7 +14,7 @@
# https://docs.microsoft.com/azure/cosmos-db/create-sql-api-python#create-a-database-account
#
# 2. Microsoft Azure Cosmos
# pip install azure-cosmos==4.0.0
# pip install azure-cosmos>=4.0.0
# ----------------------------------------------------------------------------------------------------------
# Sample - how to get and use resource token that allows restricted access to data
# ----------------------------------------------------------------------------------------------------------
21 changes: 13 additions & 8 deletions sdk/cosmos/azure-cosmos/test/test_crud.py
@@ -207,7 +207,7 @@ def test_sql_query_crud(self):
self.assertEqual(0, len(databases), 'Unexpected number of query results.')

# query with a string.
databases = list(self.client.query_databases('SELECT * FROM root r WHERE r.id="' + db2.id + '"'))
databases = list(self.client.query_databases('SELECT * FROM root r WHERE r.id="' + db2.id + '"')) #nosec
self.assertEqual(1, len(databases), 'Unexpected number of query results.')
self.client.delete_database(db1.id)
self.client.delete_database(db2.id)
Expand Down Expand Up @@ -507,30 +507,30 @@ def test_partitioned_collection_document_crud_and_query(self):
# query document on the partition key specified in the predicate will pass even without setting enableCrossPartitionQuery or passing in the partitionKey value
documentlist = list(created_collection.query_items(
{
'query': 'SELECT * FROM root r WHERE r.id=\'' + replaced_document.get('id') + '\''
'query': 'SELECT * FROM root r WHERE r.id=\'' + replaced_document.get('id') + '\'' #nosec
}))
self.assertEqual(1, len(documentlist))

# query document on any property other than partitionKey will fail without setting enableCrossPartitionQuery or passing in the partitionKey value
try:
list(created_collection.query_items(
{
'query': 'SELECT * FROM root r WHERE r.key=\'' + replaced_document.get('key') + '\''
'query': 'SELECT * FROM root r WHERE r.key=\'' + replaced_document.get('key') + '\'' #nosec
}))
except Exception:
pass

# cross partition query
documentlist = list(created_collection.query_items(
query='SELECT * FROM root r WHERE r.key=\'' + replaced_document.get('key') + '\'',
query='SELECT * FROM root r WHERE r.key=\'' + replaced_document.get('key') + '\'', #nosec
enable_cross_partition_query=True
))

self.assertEqual(1, len(documentlist))

# query document by providing the partitionKey value
documentlist = list(created_collection.query_items(
query='SELECT * FROM root r WHERE r.key=\'' + replaced_document.get('key') + '\'',
query='SELECT * FROM root r WHERE r.key=\'' + replaced_document.get('key') + '\'', #nosec
partition_key=replaced_document.get('id')
))

@@ -746,14 +746,14 @@ def test_partitioned_collection_conflict_crud_and_query(self):
# query conflicts on any property other than partitionKey will fail without setting enableCrossPartitionQuery or passing in the partitionKey value
try:
list(created_collection.query_conflicts(
query='SELECT * FROM root r WHERE r.resourceType=\'' + conflict_definition.get(
query='SELECT * FROM root r WHERE r.resourceType=\'' + conflict_definition.get( #nosec
'resourceType') + '\''
))
except Exception:
pass

conflictlist = list(created_collection.query_conflicts(
query='SELECT * FROM root r WHERE r.resourceType=\'' + conflict_definition.get('resourceType') + '\'',
query='SELECT * FROM root r WHERE r.resourceType=\'' + conflict_definition.get('resourceType') + '\'', #nosec
enable_cross_partition_query=True
))

@@ -762,7 +762,7 @@ def test_partitioned_collection_conflict_crud_and_query(self):
# query conflicts by providing the partitionKey value
options = {'partitionKey': conflict_definition.get('id')}
conflictlist = list(created_collection.query_conflicts(
query='SELECT * FROM root r WHERE r.resourceType=\'' + conflict_definition.get('resourceType') + '\'',
query='SELECT * FROM root r WHERE r.resourceType=\'' + conflict_definition.get('resourceType') + '\'', #nosec
partition_key=conflict_definition['id']
))

@@ -939,6 +939,11 @@ def test_document_upsert(self):
self.assertEqual(created_document['id'],
document_definition['id'])

# test error for non-string id
with pytest.raises(TypeError):
document_definition['id'] = 7
created_collection.upsert_item(body=document_definition)

# read documents after creation and verify updated count
documents = list(created_collection.read_all_items())
self.assertEqual(
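
The `#nosec` markers added in this and the following test files suppress Bandit's hardcoded-SQL-expression check (B608) for queries built by string concatenation from values the tests themselves control. For untrusted input, the parameterized form of `query_items` avoids concatenation entirely; a sketch, where `container` is any azure-cosmos ContainerProxy and `item_id` is a placeholder:

```python
# Parameterized query sketch: no string concatenation, so nothing for B608 to flag.
def query_by_id(container, item_id):
    return list(container.query_items(
        query="SELECT * FROM root r WHERE r.id = @item_id",
        parameters=[{"name": "@item_id", "value": item_id}],
        enable_cross_partition_query=True,
    ))
```
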
4 changes: 2 additions & 2 deletions sdk/cosmos/azure-cosmos/test/test_globaldb.py
@@ -89,15 +89,15 @@ def setUp(self):
self.client = cosmos_client_connection.CosmosClientConnection(Test_globaldb_tests.host, Test_globaldb_tests.masterKey)

# Create the test database only when it's not already present
query_iterable = self.client.QueryDatabases('SELECT * FROM root r WHERE r.id=\'' + Test_globaldb_tests.test_database_id + '\'')
query_iterable = self.client.QueryDatabases('SELECT * FROM root r WHERE r.id=\'' + Test_globaldb_tests.test_database_id + '\'') #nosec
it = iter(query_iterable)

self.test_db = next(it, None)
if self.test_db is None:
self.test_db = self.client.CreateDatabase({'id' : Test_globaldb_tests.test_database_id})

# Create the test collection only when it's not already present
query_iterable = self.client.QueryContainers(self.test_db['_self'], 'SELECT * FROM root r WHERE r.id=\'' + Test_globaldb_tests.test_collection_id + '\'')
query_iterable = self.client.QueryContainers(self.test_db['_self'], 'SELECT * FROM root r WHERE r.id=\'' + Test_globaldb_tests.test_collection_id + '\'') #nosec
it = iter(query_iterable)

self.test_coll = next(it, None)
2 changes: 1 addition & 1 deletion sdk/cosmos/azure-cosmos/test/test_multi_orderby.py
@@ -252,7 +252,7 @@ def test_multi_orderby_queries(self):
where_string = "WHERE root." + self.NUMBER_FIELD + " % 2 = 0" if has_filter else ""
query = "SELECT " + top_string + " [" + select_item_builder + "] " + \
"FROM root " + where_string + " " + \
"ORDER BY " + orderby_item_builder
"ORDER BY " + orderby_item_builder #nosec

expected_ordered_list = self.top(self.sort(self.filter(self.items, has_filter), composite_index, invert), has_top, top_count)

8 changes: 4 additions & 4 deletions sdk/cosmos/azure-cosmos/test/test_orderby.py
@@ -168,7 +168,7 @@ def test_orderby_top_query(self):

# an order by query with top, total existing docs more than requested top count
query = {
'query': 'SELECT top %d * FROM root r order by r.spam' % top_count
'query': 'SELECT top %d * FROM root r order by r.spam' % top_count #nosec
}

def get_order_by_key(r):
@@ -186,7 +186,7 @@ def test_orderby_top_query_less_results_than_top_counts(self):

# an order by query with top, total existing docs less than requested top count
query = {
'query': 'SELECT top %d * FROM root r order by r.spam' % top_count
'query': 'SELECT top %d * FROM root r order by r.spam' % top_count #nosec
}

def get_order_by_key(r):
@@ -226,7 +226,7 @@ def test_top_query(self):

# a top query, the results will be sorted based on the target partition key range
query = {
'query': 'SELECT top %d * FROM root r' % len(expected_ordered_ids)
'query': 'SELECT top %d * FROM root r' % len(expected_ordered_ids) #nosec
}
self.execute_query_and_validate_results(query, expected_ordered_ids)

@@ -260,7 +260,7 @@ def test_top_query_as_string(self):
expected_ordered_ids = [d['id'] for d in first_two_ranges_results]

# a top query, the results will be sorted based on the target partition key range
query = 'SELECT top %d * FROM root r' % len(expected_ordered_ids)
query = 'SELECT top %d * FROM root r' % len(expected_ordered_ids) #nosec
self.execute_query_and_validate_results(query, expected_ordered_ids)

def test_parametrized_top_query(self):
18 changes: 9 additions & 9 deletions sdk/cosmos/azure-cosmos/test/test_query.py
@@ -349,55 +349,55 @@ def test_distinct(self):
padded_docs = self._pad_with_none(documents, distinct_field)

self._validate_distinct(created_collection=created_collection,
query='SELECT distinct c.%s from c ORDER BY c.%s' % (distinct_field, distinct_field),
query='SELECT distinct c.%s from c ORDER BY c.%s' % (distinct_field, distinct_field), #nosec
results=self._get_distinct_docs(self._get_order_by_docs(padded_docs, distinct_field, None), distinct_field, None, True),
is_select=False,
fields=[distinct_field])

self._validate_distinct(created_collection=created_collection,
query='SELECT distinct c.%s, c.%s from c ORDER BY c.%s, c.%s' % (distinct_field, pk_field, pk_field, distinct_field),
query='SELECT distinct c.%s, c.%s from c ORDER BY c.%s, c.%s' % (distinct_field, pk_field, pk_field, distinct_field), #nosec
results=self._get_distinct_docs(self._get_order_by_docs(padded_docs, pk_field, distinct_field), distinct_field, pk_field, True),
is_select=False,
fields=[distinct_field, pk_field])

self._validate_distinct(created_collection=created_collection,
query='SELECT distinct c.%s, c.%s from c ORDER BY c.%s, c.%s' % (distinct_field, pk_field, distinct_field, pk_field),
query='SELECT distinct c.%s, c.%s from c ORDER BY c.%s, c.%s' % (distinct_field, pk_field, distinct_field, pk_field), #nosec
results=self._get_distinct_docs(self._get_order_by_docs(padded_docs, distinct_field, pk_field), distinct_field, pk_field, True),
is_select=False,
fields=[distinct_field, pk_field])

self._validate_distinct(created_collection=created_collection,
query='SELECT distinct value c.%s from c ORDER BY c.%s' % (distinct_field, distinct_field),
query='SELECT distinct value c.%s from c ORDER BY c.%s' % (distinct_field, distinct_field), #nosec
results=self._get_distinct_docs(self._get_order_by_docs(padded_docs, distinct_field, None), distinct_field, None, True),
is_select=False,
fields=[distinct_field])

self._validate_distinct(created_collection=created_collection,
query='SELECT distinct c.%s from c' % (distinct_field),
query='SELECT distinct c.%s from c' % (distinct_field), #nosec
results=self._get_distinct_docs(padded_docs, distinct_field, None, False),
is_select=True,
fields=[distinct_field])

self._validate_distinct(created_collection=created_collection,
query='SELECT distinct c.%s, c.%s from c' % (distinct_field, pk_field),
query='SELECT distinct c.%s, c.%s from c' % (distinct_field, pk_field), #nosec
results=self._get_distinct_docs(padded_docs, distinct_field, pk_field, False),
is_select=True,
fields=[distinct_field, pk_field])

self._validate_distinct(created_collection=created_collection,
query='SELECT distinct value c.%s from c' % (distinct_field),
query='SELECT distinct value c.%s from c' % (distinct_field), #nosec
results=self._get_distinct_docs(padded_docs, distinct_field, None, True),
is_select=True,
fields=[distinct_field])

self._validate_distinct(created_collection=created_collection,
query='SELECT distinct c.%s from c ORDER BY c.%s' % (different_field, different_field),
query='SELECT distinct c.%s from c ORDER BY c.%s' % (different_field, different_field), #nosec
results=[],
is_select=True,
fields=[different_field])

self._validate_distinct(created_collection=created_collection,
query='SELECT distinct c.%s from c' % (different_field),
query='SELECT distinct c.%s from c' % (different_field), #nosec
results=['None'],
is_select=True,
fields=[different_field])
33 changes: 12 additions & 21 deletions sdk/datalake/azure-mgmt-datalake-analytics/README.md
@@ -1,30 +1,21 @@
## Microsoft Azure SDK for Python
# Microsoft Azure SDK for Python

This is the Microsoft Azure Data Lake Analytics Management Client
Library.
This is the Microsoft Azure Data Lake Analytics Management Client Library.
This package has been tested with Python 2.7, 3.5, 3.6, 3.7 and 3.8.
For a more complete view of Azure libraries, see the [Github repo](https://github.com/Azure/azure-sdk-for-python/)

Azure Resource Manager (ARM) is the next generation of management APIs
that replace the old Azure Service Management (ASM).

This package has been tested with Python 2.7, 3.5, 3.6 and 3.7.
# Usage

For the older Azure Service Management (ASM) libraries, see
[azure-servicemanagement-legacy](https://pypi.python.org/pypi/azure-servicemanagement-legacy)
library.

For a more complete set of Azure libraries, see the
[azure](https://pypi.python.org/pypi/azure) bundle package.

## Usage

For code examples, see [Data Lake Analytics
Management](https://docs.microsoft.com/python/api/overview/azure/data-lake-analytics)
For code examples, see [Data Lake Analytics Management](https://docs.microsoft.com/python/api/overview/azure/data-lake-analytics)
on docs.microsoft.com.

## Provide Feedback

If you encounter any bugs or have suggestions, please file an issue in
the [Issues](https://github.com/Azure/azure-sdk-for-python/issues)
# Provide Feedback

If you encounter any bugs or have suggestions, please file an issue in the
[Issues](https://github.com/Azure/azure-sdk-for-python/issues)
section of the project.

![image](https://azure-sdk-impressions.azurewebsites.net/api/impressions/azure-sdk-for-python%2Fazure-mgmt-datalake-analytics%2FREADME.png)

![Impressions](https://azure-sdk-impressions.azurewebsites.net/api/impressions/azure-sdk-for-python%2Fazure-mgmt-datalake-analytics%2FREADME.png)
5 changes: 4 additions & 1 deletion sdk/datalake/azure-mgmt-datalake-analytics/setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,9 @@
pass

# Version extraction inspired from 'requests'
with open(os.path.join(package_folder_path, 'version.py'), 'r') as fd:
with open(os.path.join(package_folder_path, 'version.py')
if os.path.exists(os.path.join(package_folder_path, 'version.py'))
else os.path.join(package_folder_path, '_version.py'), 'r') as fd:
version = re.search(r'^VERSION\s*=\s*[\'"]([^\'"]*)[\'"]',
fd.read(), re.MULTILINE).group(1)

@@ -67,6 +69,7 @@
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'License :: OSI Approved :: MIT License',
],
zip_safe=False,
@@ -154,7 +154,7 @@ def setUp(self):
)
AS
T(a, b);
END;""".format(self.db_name, self.table_name, self.tvf_name, self.view_name, self.proc_name)
END;""".format(self.db_name, self.table_name, self.tvf_name, self.view_name, self.proc_name) #nosec

# define all the job IDs to be used during execution
if self.is_playback():
@@ -350,7 +350,7 @@ def __init__(self, **kwargs): # pylint: disable=unused-argument

@staticmethod
def get_content_md5(data):
md5 = hashlib.md5()
md5 = hashlib.md5() #nosec
if isinstance(data, bytes):
md5.update(data)
elif hasattr(data, 'read'):
@@ -350,7 +350,7 @@ def __init__(self, **kwargs): # pylint: disable=unused-argument

@staticmethod
def get_content_md5(data):
md5 = hashlib.md5()
md5 = hashlib.md5() #nosec
if isinstance(data, bytes):
md5.update(data)
elif hasattr(data, 'read'):
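
The `#nosec` on `hashlib.md5()` in both copies of `get_content_md5` above marks a non-security use: the digest serves as a content-integrity checksum rather than a cryptographic protection. On Python 3.9+ the same intent can be stated explicitly; a sketch (the base64 encoding is an assumption about a typical Content-MD5 computation, not taken from this code):

```python
# Non-security MD5 checksum sketch; requires Python 3.9+ for usedforsecurity.
import base64
import hashlib

data = b"example payload"
md5 = hashlib.md5(usedforsecurity=False)  # checksum only, not a security primitive
md5.update(data)
content_md5 = base64.b64encode(md5.digest()).decode("utf-8")
print(content_md5)
```
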
1 change: 1 addition & 0 deletions sdk/hdinsight/azure-mgmt-hdinsight/dev_requirements.txt
@@ -2,4 +2,5 @@
-e ../../core/azure-core
-e ../../keyvault/azure-keyvault-keys
-e ../../keyvault/azure-mgmt-keyvault
-e ../../loganalytics/azure-mgmt-loganalytics
-e ../../resources/azure-mgmt-msi/