// chained-asyn-XMLHttpRequest.ts
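// Chained asynchronous XMLHttpRequest queue: queued requests are sent with a
// bounded number of concurrent XHRs, and each completion callback chains the
// next request from the queue.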
interface I_request {
    url: string
    method: string
    header_map: Map<string, string>
    body: string
    if_request_successful(response: object | string): boolean
}
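// model pairs one I_request with the UI callback that should run when its
// response (or an error) arrives.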
class model {
    private request: I_request
    protected ui_modify_func: (response: object | string, if_request_successful: boolean) => void
    constructor(ui_modify_func: (response: object | string, if_request_successful: boolean) => void, request: I_request) {
        this.ui_modify_func = ui_modify_func
        this.request = request
    }
    modify_ui(response: object | string, if_request_successful: boolean): void {
        return this.ui_modify_func(response, if_request_successful)
    }
    get_request(): I_request {
        return this.request
    }
}
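// chained_asyn_requestor drains a FIFO queue of models with bounded
// concurrency: start() dispatches up to max_asyn_nums requests at once, and
// each completed request chains the next queued one from its own callback.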
class chained_asyn_requestor {
    private model_queue: Array<model>
    private max_asyn_nums: number
    private abort_flag: boolean
    constructor(max_asyn_nums: number) {
        this.max_asyn_nums = max_asyn_nums
        this.model_queue = new Array<model>()
        this.abort_flag = false
    }
    get_queue_size(): number {
        return this.model_queue.length
    }
    queue_push(item: model): void {
        this.model_queue.push(item)
    }
    private queue_pop(): model {
        if (this.model_queue.length > 0) {
            // shift() is typed model | undefined; the length check above
            // guarantees a value, so the non-null assertion is safe
            return this.model_queue.shift()!
        }
        else {
            throw new RangeError("The queue is empty.")
        }
    }
    private create_xhr(model: model): XMLHttpRequest {
        const xhr = new XMLHttpRequest()
        const req_info = model.get_request()
        xhr.open(req_info.method, req_info.url, true)
        for (const [key, value] of req_info.header_map) {
            xhr.setRequestHeader(key, value)
        }
        // xhr.responseType = 'json'
        // On load or error, report the response to the model, then chain the
        // next queued request so at most max_asyn_nums XHRs are in flight
        const callback = () => {
            const res = xhr.response
            model.modify_ui(res, req_info.if_request_successful(res))
            if (this.get_queue_size() > 0 && !this.abort_flag) {
                const next_model = this.queue_pop()
                const next_xhr = this.create_xhr(next_model)
                next_xhr.send(next_model.get_request().body)
            }
        }
        xhr.onload = callback
        xhr.onerror = callback
        return xhr
    }
    start() {
        this.abort_flag = false
        // Prime the pipeline: dispatch up to max_asyn_nums initial requests;
        // each one chains further requests as it completes
        for (let i = 0; i < this.max_asyn_nums; i++) {
            if (this.get_queue_size() > 0) {
                const top_model = this.queue_pop()
                this.create_xhr(top_model).send(top_model.get_request().body)
            }
            else {
                break
            }
        }
    }
    stop() {
        // Stops dispatching queued requests; requests already in flight
        // still complete and report to their models
        this.abort_flag = true
    }
}
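// Usage sketch (an assumption about intended use; not exercised by the test
// below): stop() only prevents further queued models from being dispatched
// once the in-flight requests finish; it does not abort requests that were
// already sent. Calling start() again clears the flag and resumes draining
// the queue, e.g.:
//   requestor.stop()                           // pause after current responses arrive
//   setTimeout(() => requestor.start(), 1000)  // resume one second later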
// test
// 1 Create a chained_asyn_requestor object; its parameter controls how many
//   asynchronous requests may be in flight at the same time.
let chained_asyn_requestor_obj = new chained_asyn_requestor(4)
for (let i = 0; i <= 50; i++) {
    // 2 Create a request object that implements interface I_request.
    const request: I_request = {
        url: "http://127.0.0.1:9999/test?id=" + String(i),
        method: "POST",
        header_map: new Map<string, string>(),
        body: "body:" + String(i),
        // Treat any non-empty response as success
        if_request_successful: (response: object | string) => response != undefined && response != ""
    }
    request.header_map.set("header", "header:" + String(i))
    // 3 Create a model object with 2 parameters.
    //   parameter 1: ui_modify_func: (response: object | string, if_request_successful: boolean) => void
    //   parameter 2: a request object describing the meta-info of an HTTP request (url, method, headers, body),
    //   plus if_request_successful(response: object | string): boolean, which checks whether the response is as expected.
    const md = new model((res, if_request_successful) => {
        if (if_request_successful) { console.log(res) } else { console.log(0 - i, res) }
    }, request)
    // 4 Use chained_asyn_requestor.queue_push(model) to push the model object into the requestor's queue.
    chained_asyn_requestor_obj.queue_push(md)
}
// console.log(chained_asyn_requestor_obj.get_queue_size())
// 5 Start sending.
chained_asyn_requestor_obj.start()
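// Optional: a minimal local test server for the loop above -- a sketch, not
// part of the original file. It assumes a separate Node.js script (with
// @types/node installed); the port 9999 and /test path match the URLs used in
// the test. Kept commented out because the code above targets the browser,
// while this targets Node. If the page is served from a different origin, the
// server would also need CORS headers and an OPTIONS preflight handler,
// because the test sets a custom "header" request header.
//
// import * as http from "http"
// const server = http.createServer((req, res) => {
//     let body = ""
//     req.on("data", (chunk) => { body += chunk })
//     req.on("end", () => {
//         // Echo the URL and body so if_request_successful sees a non-empty response
//         res.writeHead(200, { "Content-Type": "text/plain" })
//         res.end("echo " + req.url + " " + body)
//     })
// })
// server.listen(9999, "127.0.0.1", () => console.log("test server on 127.0.0.1:9999"))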