Commit 3d75323

feat: add hosts to generate resume file name
supplementary adjustment for fa92d96
1 parent 64ffaf9 commit 3d75323

File tree

3 files changed (+179, -117 lines)

qiniu/storage/resume.js (+146, -93)

@@ -17,6 +17,7 @@ const rpc = require('../rpc');
 const { SERVICE_NAME } = require('../httpc/region');
 const { ResponseWrapper } = require('../httpc/responseWrapper');
 const { Endpoint } = require('../httpc/endpoint');
+const { StaticRegionsProvider } = require('../httpc/regionsProvider');
 const { EndpointsRetryPolicy } = require('../httpc/endpointsRetryPolicy');
 const { RegionsRetryPolicy } = require('../httpc/regionsRetryPolicy');
 const { Retrier } = require('../retry');
@@ -86,6 +87,7 @@ function PutExtra (
     this.fname = fname || '';
     this.params = params || {};
     this.mimeType = mimeType || null;
+    // @deprecated use resumeRecorder and resumeKey instead
     this.resumeRecordFile = resumeRecordFile || null;
     this.progressCallback = progressCallback || null;
     this.partSize = partSize || conf.BLOCK_SIZE;
@@ -100,56 +102,83 @@ function PutExtra (
  * @param {Object} options
  * @param {string} options.accessKey
  * @param {string} options.bucketName
- * @param {boolean} [options.retryable]
- * @param {'v1' | 'v2' | string} [options.uploadApiVersion]
- * @param {JsonFileRecorder} [options.resumeRecorder]
- * @param {string} [options.resumeKey]
+ * @param {string} [options.key]
+ * @param {string} [options.filePath]
+ * @param {PutExtra} options.putExtra
+ *
+ * @returns Retrier
  */
 function _getRegionsRetrier (options) {
     const {
-        bucketName,
         accessKey,
-        retryable = true,
+        bucketName,
+        key,
+        filePath,

-        uploadApiVersion,
-        resumeRecorder,
-        resumeKey
+        putExtra
     } = options;

     const preferredScheme = this.config.useHttpsDomain ? 'https' : 'http';
-    let preferredEndpoints;
-    const isResumeAvailable = Boolean(resumeRecorder && resumeKey);
-    if (isResumeAvailable) {
-        const resumeInfo = resumeRecorder.getSync(resumeKey);
-        if (resumeInfo && Array.isArray(resumeInfo.upDomains)) {
-            preferredEndpoints = resumeInfo.upDomains.map(d =>
-                new Endpoint(d, { defaultScheme: preferredScheme }));
-        }
+
+    let regionsProviderPromise = this.config.getRegionsProvider({
+        accessKey,
+        bucketName
+    });
+
+    // generate resume key, if there is a recorder but not resume key
+    if (putExtra.resumeRecorder && !putExtra.resumeKey) {
+        regionsProviderPromise = regionsProviderPromise
+            .then(regionsProvider => regionsProvider.getRegions())
+            .then(regions => {
+                if (!regions || !regions.length) {
+                    return Promise.reject(new Error(`no region available for the bucket "${bucketName}"`));
+                }
+                const upAccEndpoints = regions[0].services[SERVICE_NAME.UP_ACC] || [];
+                const upEndpoints = regions[0].services[SERVICE_NAME.UP] || [];
+                const upHosts = upAccEndpoints.concat(upEndpoints).map(e => e.host);
+                putExtra.resumeKey = putExtra.resumeRecorder.generateKeySync({
+                    hosts: upHosts,
+                    accessKey: accessKey,
+                    bucketName: bucketName,
+                    key: key,
+                    filePath: filePath,
+                    version: putExtra.version,
+                    partSize: putExtra.partSize
+                });
+                return new StaticRegionsProvider(regions);
+            });
     }

-    return this.config.getRegionsProvider({
-        bucketName,
-        accessKey
-    })
+    return regionsProviderPromise
         .then(regionsProvider => {
+            // handle preferred endpoints
+            let preferredEndpoints;
+            if (putExtra.resumeRecorder && putExtra.resumeKey) {
+                const resumeInfo = putExtra.resumeRecorder.getSync(putExtra.resumeKey);
+                if (resumeInfo && Array.isArray(resumeInfo.upDomains)) {
+                    preferredEndpoints = resumeInfo.upDomains.map(d =>
+                        new Endpoint(d, { defaultScheme: preferredScheme }));
+                }
+            }
+
             const serviceNames = this.config.accelerateUploading
                 ? [SERVICE_NAME.UP_ACC, SERVICE_NAME.UP]
                 : [SERVICE_NAME.UP];
             const retryPolicies = [
                 new AccUnavailableRetryPolicy(),
                 new TokenExpiredRetryPolicy({
-                    uploadApiVersion,
+                    uploadApiVersion: putExtra.version,
                     recordExistsHandler: () => {
-                        if (!isResumeAvailable) {
+                        if (!putExtra.resumeRecorder || !putExtra.resumeKey) {
                             return;
                         }
-                        resumeRecorder.hasSync(resumeKey);
+                        putExtra.resumeRecorder.hasSync(putExtra.resumeKey);
                     },
                     recordDeleteHandler: () => {
-                        if (!isResumeAvailable) {
+                        if (!putExtra.resumeRecorder || !putExtra.resumeKey) {
                             return;
                         }
-                        resumeRecorder.deleteSync(resumeKey);
+                        putExtra.resumeRecorder.deleteSync(putExtra.resumeKey);
                     }
                 }),
                 new EndpointsRetryPolicy({
@@ -159,10 +188,10 @@ function _getRegionsRetrier (options) {
                     regionsProvider,
                     serviceNames,
                     onChangedRegion: () => {
-                        if (!isResumeAvailable) {
+                        if (!putExtra.resumeRecorder || !putExtra.resumeKey) {
                             return;
                         }
-                        resumeRecorder.deleteSync(resumeKey);
+                        putExtra.resumeRecorder.deleteSync(putExtra.resumeKey);
                     },
                     preferredEndpoints
                 })
@@ -175,12 +204,12 @@ function _getRegionsRetrier (options) {
                         if (context.error.noNeedRetry) {
                             return false;
                         }
-                        return retryable;
+                        return true;
                     }
                     if (policy instanceof AccUnavailableRetryPolicy) {
                         return true;
                     }
-                    return retryable && context.result && context.result.needRetry();
+                    return context.result && context.result.needRetry();
                 }
             });
         });
@@ -219,33 +248,39 @@ ResumeUploader.prototype.putStream = function (
         }
     );

-    // Why need retrier even if retryable is false?
-    // Because the retrier is used to get the endpoints,
-    // which will be initialed by region policy.
-    const result = _getRegionsRetrier.call(this, {
-        bucketName: util.getBucketFromUptoken(uploadToken),
-        accessKey: util.getAKFromUptoken(uploadToken),
-        retryable: false
+    const bucketName = util.getBucketFromUptoken(uploadToken);
+    const accessKey = util.getAKFromUptoken(uploadToken);

-        // useless by not retryable
-        // uploadApiVersion: putExtra.version,
+    const result = this.config.getRegionsProvider({
+        bucketName,
+        accessKey
     })
-        .then(retrier => Promise.all([
-            retrier,
-            retrier.initContext()
-        ]))
-        .then(([retrier, context]) => retrier.retry({
-            func: context => putReq(
-                context.endpoint,
+        .then(regionsProvider => regionsProvider.getRegions())
+        .then(regions => {
+            if (!regions || !regions.length) {
+                return Promise.reject(new Error('no region available for the bucket', bucketName));
+            }
+            const preferService = this.config.accelerateUploading
+                ? SERVICE_NAME.UP_ACC
+                : SERVICE_NAME.UP;
+            if (
+                !regions[0].services ||
+                !regions[0].services[preferService] ||
+                !regions[0].services[preferService].length
+            ) {
+                return Promise.reject(new Error('no endpoint available for the bucket', bucketName));
+            }
+            const endpoint = regions[0].services[preferService][0];
+            return putReq(
+                endpoint,
                 preferredScheme,
                 uploadToken,
                 key,
                 rsStream,
                 rsStreamLen,
                 putExtra
-            ),
-            context
-        }));
+            );
+        });

     handleReqCallback(result, callbackFunc);

@@ -838,38 +873,36 @@ ResumeUploader.prototype.putFile = function (
         putExtra.fname = path.basename(localFile);
     }

-    const akFromToken = util.getAKFromUptoken(uploadToken);
-    const bucketFromToken = util.getBucketFromUptoken(uploadToken);
+    const accessKey = util.getAKFromUptoken(uploadToken);
+    const bucketName = util.getBucketFromUptoken(uploadToken);
+
     putExtra = getDefaultPutExtra(
         putExtra,
         {
-            accessKey: akFromToken,
-            bucketName: bucketFromToken,
-            key,
-            filePath: localFile
+            key
         }
     );

     const result = _getRegionsRetrier.call(this, {
-        accessKey: akFromToken,
-        bucketName: bucketFromToken,
+        accessKey,
+        bucketName,
+        key,
+        filePath: localFile,

-        uploadApiVersion: putExtra.version,
-        resumeRecorder: putExtra.resumeRecorder,
-        resumeKey: putExtra.resumeKey
+        putExtra
     })
         .then(retrier => Promise.all([
             retrier,
             retrier.initContext()
         ]))
        .then(([retrier, context]) => retrier.retry({
-            func: context => {
+            func: ctx => {
                const rsStream = fs.createReadStream(localFile, {
                    highWaterMark: conf.BLOCK_SIZE
                });
                const rsStreamLen = fs.statSync(localFile).size;
                const p = putReq(
-                    context.endpoint,
+                    ctx.endpoint,
                    preferredScheme,
                    uploadToken,
                    key,
@@ -914,10 +947,7 @@ ResumeUploader.prototype.putFileWithoutKey = function (
 /**
  * @param {PutExtra} putExtra
  * @param {Object} options
- * @param {string} [options.accessKey]
- * @param {string} [options.bucketName]
  * @param {string | null} [options.key]
- * @param {string} [options.filePath]
  * @returns {PutExtra}
  */
 function getDefaultPutExtra (putExtra, options) {
@@ -943,30 +973,6 @@ function getDefaultPutExtra (putExtra, options) {
         putExtra.resumeKey = parsedPath.name;
     }

-    // generate `resumeKey` if not exists
-    if (
-        putExtra.resumeRecorder &&
-        !putExtra.resumeKey &&
-        options.filePath &&
-        options.accessKey &&
-        options.bucketName
-    ) {
-        let fileLastModify;
-        try {
-            fileLastModify = options.filePath && fs.statSync(options.filePath).mtimeMs.toString();
-        } catch (_err) {
-            fileLastModify = '';
-        }
-        const recordValuesToHash = [
-            putExtra.version,
-            options.accessKey,
-            `${options.bucketName}:${options.key}`,
-            options.filePath,
-            fileLastModify
-        ];
-        putExtra.resumeKey = putExtra.resumeRecorder.generateKey(recordValuesToHash);
-    }
-
     return putExtra;
 }

@@ -1001,9 +1007,9 @@ JsonFileRecorder.prototype.setSync = function (key, data) {
  * @returns {undefined | Object.<string, any>}
  */
 JsonFileRecorder.prototype.getSync = function (key) {
-    const filePath = path.join(this.baseDirPath, key);
     let result;
     try {
+        const filePath = path.join(this.baseDirPath, key);
         const recordContent = fs.readFileSync(
             filePath,
             {
@@ -1018,24 +1024,71 @@ JsonFileRecorder.prototype.getSync = function (key) {
 };

 JsonFileRecorder.prototype.hasSync = function (key) {
-    const filePath = path.join(this.baseDirPath, key);
     try {
+        const filePath = path.join(this.baseDirPath, key);
         return fs.existsSync(filePath);
     } catch (_err) {
         return false;
     }
 };

 JsonFileRecorder.prototype.deleteSync = function (key) {
-    const filePath = path.join(this.baseDirPath, key);
     try {
+        const filePath = path.join(this.baseDirPath, key);
         fs.unlinkSync(filePath);
     } catch (_err) {
         // pass
     }
 };

-JsonFileRecorder.prototype.generateKey = function (fields) {
+/**
+ * @param {Object} options
+ * @param {string[]} options.hosts
+ * @param {string} options.accessKey
+ * @param {string} options.bucketName
+ * @param {string} options.key
+ * @param {string} options.filePath
+ * @param {string} options.version
+ * @param {string} options.partSize
+ * @returns {string | undefined}
+ */
+JsonFileRecorder.prototype.generateKeySync = function (options) {
+    // if some options not pass in, can't generate a valid key
+    if (
+        [
+            Array.isArray(options.hosts),
+            options.accessKey,
+            options.bucketName,
+            options.key,
+            options.filePath,
+            options.version,
+            options.partSize
+        ].some(v => !v)
+    ) {
+        return;
+    }
+
+    let fileStats;
+    try {
+        fileStats = options.filePath && fs.statSync(options.filePath);
+    } catch (_err) {
+        return;
+    }
+
+    const fields = [
+        options.hosts.join(''),
+        options.accessKey,
+        options.bucketName,
+        options.key || '',
+        options.filePath,
+        fileStats ? fileStats.mtimeMs.toString() : '',
+        fileStats ? fileStats.size.toString() : '',
+        options.version, // the upload version
+        options.version === 'v1'
+            ? conf.BLOCK_SIZE.toString()
+            : options.partSize.toString(),
+        'json.v1' // the record file format version
+    ];
     const h = crypto.createHash('sha1');
     fields.forEach(v => {
         h.update(v);

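For reference, here is a minimal standalone sketch of the record-key derivation that the new generateKeySync performs above. Only the hashed fields come from the hunk; the helper name deriveResumeKey, the 4 MB v1 block-size constant, and the final hex formatting are illustrative assumptions (the tail of generateKeySync falls outside this diff).

// Sketch only: mirrors the fields hashed by JsonFileRecorder.prototype.generateKeySync.
// deriveResumeKey is an illustrative name, not an SDK export.
const crypto = require('crypto');
const fs = require('fs');

function deriveResumeKey ({ hosts, accessKey, bucketName, key, filePath, version, partSize }) {
    const stats = fs.statSync(filePath); // mtime and size bind the key to this exact file
    const fields = [
        hosts.join(''), // the region's up hosts, the new ingredient in this commit
        accessKey,
        bucketName,
        key || '',
        filePath,
        stats.mtimeMs.toString(),
        stats.size.toString(),
        version, // upload api version, 'v1' or 'v2'
        version === 'v1' ? (4 * 1024 * 1024).toString() : partSize.toString(), // assumed conf.BLOCK_SIZE = 4 MB
        'json.v1' // record file format version
    ];
    const h = crypto.createHash('sha1');
    fields.forEach(v => h.update(v));
    return h.digest('hex'); // the real method's return formatting is outside this hunk
}

Because the up hosts, the file's mtime and size, and the part size all feed the hash, a stale record is never reused against a different file or a mismatched set of upload domains.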
qiniu/util.js (-4)

@@ -380,7 +380,3 @@ exports.prepareZone = function (ctx, accessKey, bucket, callback) {
         });
     }
 };
-
-exports.writeOrCreateSync = function () {
-
-};

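From the caller's side nothing extra is needed beyond attaching a recorder: when putExtra.resumeRecorder is set and putExtra.resumeKey is left empty, putFile now resolves the bucket's regions first and derives the key from the up hosts before uploading. A hedged usage sketch follows; it assumes the SDK exposes createResumeRecorderSync for building a JsonFileRecorder, which is not part of this diff.

const qiniu = require('qiniu');

const mac = new qiniu.auth.digest.Mac(process.env.QINIU_AK, process.env.QINIU_SK);
const uploadToken = new qiniu.rs.PutPolicy({ scope: 'my-bucket' }).uploadToken(mac);

const config = new qiniu.conf.Config();
const resumeUploader = new qiniu.resume_up.ResumeUploader(config);

const putExtra = new qiniu.resume_up.PutExtra();
putExtra.version = 'v2';
// Assumption: factory for JsonFileRecorder; the exact helper is not shown in this commit.
putExtra.resumeRecorder = qiniu.resume_up.createResumeRecorderSync('/tmp/qiniu-resume');
// resumeKey is intentionally left unset; it is generated from the region's up hosts.

resumeUploader.putFile(uploadToken, 'object-key', '/path/to/large-file.bin', putExtra,
    (err, body, info) => {
        if (err) {
            console.error(err);
            return;
        }
        console.log(info.statusCode, body);
    });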