 import requests
 import json
 import argparse
-import urlparse
 import shutil
 
+# support both python 2 & 3
+try:
+    import urlparse as urlparser
+except ImportError:
+    import urllib.parse as urlparser
+
+
 from dasc2.lib.replay_helpers import check_build_version
 
 API_BASE_URL = 'https://us.api.battle.net'
@@ -57,7 +63,7 @@ def get_base_url(access_token):
     params = {
         'namespace': API_NAMESPACE,
     }
-    response = requests.get(urlparse.urljoin(API_BASE_URL, "/data/sc2/archive_url/base_url"), headers=headers,
+    response = requests.get(urlparser.urljoin(API_BASE_URL, "/data/sc2/archive_url/base_url"), headers=headers,
                             params=params)
     return json.loads(response.text)["base_url"]
 
@@ -69,7 +75,7 @@ def search_by_client_version(access_token, client_version):
         'client_version': client_version,
         '_pageSize': 25
     }
-    response = requests.get(urlparse.urljoin(API_BASE_URL, "/data/sc2/search/archive"), headers=headers, params=params)
+    response = requests.get(urlparser.urljoin(API_BASE_URL, "/data/sc2/search/archive"), headers=headers, params=params)
     response = json.loads(response.text)
     meta_urls = []
     for result in response['results']:
@@ -111,19 +117,19 @@ def get_replay_pack(client_version, client_key, client_secret, output_dir, extra
         download_base_url = get_base_url(access_token)
 
         # Get meta file infos for the given client version
-        print 'Searching replay packs with client version=' + client_version
+        print('Searching replay packs with client version=' + client_version)
         meta_file_urls = search_by_client_version(access_token, client_version)
         if len(meta_file_urls) == 0:
-            print 'No matching replay packs found for the client version!'
+            print('No matching replay packs found for the client version!')
             return
 
         # For each meta file, construct full url to download replay packs
-        print 'Building urls for downloading replay packs. num_files={0}'.format(len(meta_file_urls))
+        print('Building urls for downloading replay packs. num_files={0}'.format(len(meta_file_urls)))
         download_urls = []
         for meta_file_url in meta_file_urls:
             meta_file_info = get_meta_file_info(access_token, meta_file_url)
             file_path = meta_file_info['path']
-            download_urls.append(urlparse.urljoin(download_base_url, file_path))
+            download_urls.append(urlparser.urljoin(download_base_url, file_path))
 
 
         files = []
@@ -141,12 +147,12 @@ def get_replay_pack(client_version, client_key, client_secret, output_dir, extra
 
         # Download replay packs.
         for archive_url in sorted_urls:
-            print 'Downloading replay packs. url=' + archive_url
+            print('Downloading replay packs. url=' + archive_url)
             files.append(download_file(archive_url, output_dir))
 
     except Exception as e:
         import traceback
-        print 'Failed to download replay packs. traceback={}'.format(traceback.format_exc())
+        print('Failed to download replay packs. traceback={}'.format(traceback.format_exc()))
 
 
 def parse_args():
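
The changes above bind the Python 2 `urlparse` module and the Python 3 `urllib.parse` module to a single alias and switch `print` statements to `print()` calls, so the script runs under either interpreter. A minimal sketch of that compatibility pattern, using hypothetical placeholder values rather than anything returned by the Battle.net API:

# Resolve one alias for the URL-parsing module on Python 2 and Python 3.
try:
    import urlparse as urlparser          # Python 2
except ImportError:
    import urllib.parse as urlparser      # Python 3

# Hypothetical values for illustration only; the real script builds these
# from the Battle.net API responses.
download_base_url = 'https://example.com/archives/'
file_path = 'replay_packs/pack_01.zip'

# urljoin behaves the same through the alias on both interpreters, and the
# parenthesized print works as a statement (Py2) or a function call (Py3).
print(urlparser.urljoin(download_base_url, file_path))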