
Commit aa0f6c2

Merge pull request #369 from sushobhit-lt/DOT-6255
handle tunnel cases with exec:start
2 parents d13b7b7 + 84d3fcc commit aa0f6c2

13 files changed: +83 additions, -53 deletions

package.json

Lines changed: 1 addition & 1 deletion
@@ -1,6 +1,6 @@
 {
   "name": "@lambdatest/smartui-cli",
-  "version": "4.1.31",
+  "version": "4.1.32",
   "description": "A command line interface (CLI) to run SmartUI tests on LambdaTest",
   "files": [
     "dist/**/*"

src/commander/exec.ts

Lines changed: 1 addition & 0 deletions
@@ -40,6 +40,7 @@ command
     ctx.args.execCommand = execCommand
     ctx.snapshotQueue = new snapshotQueue(ctx)
     ctx.totalSnapshots = 0
+    ctx.sourceCommand = 'exec'

     let tasks = new Listr<Context>(
         [

src/commander/server.ts

Lines changed: 4 additions & 3 deletions
@@ -28,6 +28,7 @@ command
     ctx.snapshotQueue = new snapshotQueue(ctx);
     ctx.totalSnapshots = 0
     ctx.isStartExec = true
+    ctx.sourceCommand = 'exec-start'

     let tasks = new Listr<Context>(
         [
@@ -52,13 +53,13 @@ command

     try {
         await tasks.run(ctx);
-        if (ctx.build && ctx.build.id) {
-            startPingPolling(ctx, 'exec-start');
+        if (ctx.build && ctx.build.id && !ctx.autoTunnelStarted) {
+            startPingPolling(ctx);
         }
         if (ctx.options.fetchResults && ctx.build && ctx.build.id) {
             startPolling(ctx, '', false, '')
         }
-
+
     } catch (error) {
         console.error('Error during server execution:', error);
         process.exit(1);
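
The exec:start flow now tags the context with sourceCommand and skips ping polling when an auto tunnel is already keeping the build alive. A reduced sketch of that guard, assuming only the Context fields visible in this diff (the real Context type carries many more):

// Sketch only: these fields are the subset of Context touched by this diff.
interface KeepAliveCtx {
    build?: { id?: string };
    autoTunnelStarted?: boolean;
    sourceCommand?: string;
}

// exec:start starts ping polling only when a build exists and no auto tunnel
// was started; with an auto tunnel, tunnel polling takes over the keep-alive role.
function shouldStartPingPolling(ctx: KeepAliveCtx): boolean {
    return Boolean(ctx.build && ctx.build.id) && !ctx.autoTunnelStarted;
}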

src/lib/httpClient.ts

Lines changed: 4 additions & 3 deletions
@@ -77,7 +77,7 @@ export default class httpClient {
     (response) => response,
     async (error) => {
         const { config } = error;
-        if (config && config.url === '/screenshot' && config.method === 'post') {
+        if (config && config.url === '/screenshot' && config.method === 'post' && error?.response?.status !== 401) {
             // Set default retry count and delay if not already defined
             if (!config.retryCount) {
                 config.retryCount = 0;
@@ -242,11 +242,12 @@ export default class httpClient {
         }, log)
     }

-    getScreenshotData(buildId: string, baseline: boolean, log: Logger, projectToken: string) {
+    getScreenshotData(buildId: string, baseline: boolean, log: Logger, projectToken: string, buildName: string) {
+        log.debug(`Fetching screenshot data for buildId: ${buildId} having buildName: ${buildName} with baseline: ${baseline}`);
         return this.request({
             url: '/screenshot',
             method: 'GET',
-            params: { buildId, baseline },
+            params: { buildId, baseline, buildName },
             headers: {projectToken: projectToken}
         }, log);
     }
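
The retry interceptor now refuses to retry screenshot uploads that fail with 401, since re-sending an unauthorized request cannot succeed. A minimal sketch of the same pattern on a standalone axios instance; the retry limit, backoff, and base URL below are assumptions for illustration, not the client's actual values:

import axios, { AxiosError } from 'axios';

const client = axios.create({ baseURL: 'https://smartui.example' }); // placeholder base URL

client.interceptors.response.use(
    (response) => response,
    async (error: AxiosError) => {
        const config: any = error.config;
        // Retry only failed screenshot POSTs, and never retry auth failures (401).
        if (config && config.url === '/screenshot' && config.method === 'post'
            && error.response?.status !== 401) {
            config.retryCount = config.retryCount || 0;
            if (config.retryCount < 3) { // assumed retry limit
                config.retryCount += 1;
                await new Promise((resolve) => setTimeout(resolve, 1000 * config.retryCount)); // assumed backoff
                return client.request(config); // re-issue the original request
            }
        }
        return Promise.reject(error);
    }
);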

src/lib/server.ts

Lines changed: 18 additions & 7 deletions
@@ -4,8 +4,7 @@ import fastify, { FastifyInstance, RouteShorthandOptions } from 'fastify';
 import { readFileSync, truncate } from 'fs'
 import { Context } from '../types.js'
 import { validateSnapshot } from './schemaValidation.js'
-import { pingIntervalId } from './utils.js';
-import { stopTunnelHelper } from './utils.js';
+import { pingIntervalId, startPollingForTunnel, stopTunnelHelper, isTunnelPolling } from './utils.js';

 const uploadDomToS3ViaEnv = process.env.USE_LAMBDA_INTERNAL || false;
 export default async (ctx: Context): Promise<FastifyInstance<Server, IncomingMessage, ServerResponse>> => {
@@ -112,6 +111,7 @@ export default async (ctx: Context): Promise<FastifyInstance<Server, IncomingMes
         let replyCode: number;
         let replyBody: Record<string, any>;
         try {
+            ctx.log.info('Received stop command. Finalizing build ...');
             if(ctx.config.delayedUpload){
                 ctx.log.debug("started after processing because of delayedUpload")
                 ctx.snapshotQueue?.startProcessingfunc()
@@ -124,6 +124,7 @@ export default async (ctx: Context): Promise<FastifyInstance<Server, IncomingMes
                     }
                 }, 1000);
             })
+            let buildUrls = `build url: ${ctx.build.url}\n`;

             for (const [sessionId, capabilities] of ctx.sessionCapabilitiesMap.entries()) {
                 try {
@@ -132,9 +133,12 @@ export default async (ctx: Context): Promise<FastifyInstance<Server, IncomingMes
                     const totalSnapshots = capabilities?.snapshotCount || 0;
                     const sessionBuildUrl = capabilities?.buildURL || '';
                     const testId = capabilities?.id || '';
-
+                    ctx.log.debug(`Capabilities for sessionId ${sessionId}: ${JSON.stringify(capabilities)}`)
                     if (buildId && projectToken) {
                         await ctx.client.finalizeBuildForCapsWithToken(buildId, totalSnapshots, projectToken, ctx.log);
+                        if (ctx.autoTunnelStarted) {
+                            await startPollingForTunnel(ctx, buildId, false, projectToken, capabilities?.buildName);
+                        }
                     }

                     if (testId && buildId) {
@@ -157,10 +161,15 @@ export default async (ctx: Context): Promise<FastifyInstance<Server, IncomingMes
                 }
             }

-            //Handle Tunnel closure
-            if (ctx.config.tunnel && ctx.config.tunnel?.type === 'auto') {
-                await stopTunnelHelper(ctx)
-            }
+
+            //If Tunnel Details are present, start polling for tunnel status
+            if (ctx.tunnelDetails && ctx.tunnelDetails.tunnelHost != "" && ctx.build?.id) {
+                await startPollingForTunnel(ctx, ctx.build.id, false, '', '');
+            }
+            //stop the tunnel if it was auto started and no tunnel polling is active
+            if (ctx.autoTunnelStarted && isTunnelPolling === null) {
+                await stopTunnelHelper(ctx);
+            }

             await ctx.browser?.close();
             if (ctx.server){
@@ -180,6 +189,8 @@ export default async (ctx: Context): Promise<FastifyInstance<Server, IncomingMes
             replyBody = { error: { message: error.message } };
         }

+        ctx.log.info('Stop command processed. Tearing down server.');
+
         // Step 5: Return the response
         return reply.code(replyCode).send(replyBody);
     });
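
Condensing the stop route's new tunnel handling: when tunnel details are present the route hands shutdown over to the tunnel polling loop, and it only stops an auto-started tunnel directly if no polling loop is active. A sketch using the utils exports introduced in this commit (error handling and the per-capability loop omitted; ctx is typed loosely here):

import { startPollingForTunnel, stopTunnelHelper, isTunnelPolling } from './utils.js';

async function handleTunnelOnStop(ctx: any): Promise<void> {
    // 1. Tunnel details present: let the polling loop stop the tunnel once the build finishes.
    if (ctx.tunnelDetails && ctx.tunnelDetails.tunnelHost !== '' && ctx.build?.id) {
        await startPollingForTunnel(ctx, ctx.build.id, false, '', '');
    }
    // 2. Auto tunnel running with no active polling: nothing else will stop it, so stop it now.
    if (ctx.autoTunnelStarted && isTunnelPolling === null) {
        await stopTunnelHelper(ctx);
    }
}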

src/lib/snapshotQueue.ts

Lines changed: 1 addition & 1 deletion
@@ -380,7 +380,7 @@ export default class Queue {
                 useKafkaFlow: resp.data.useKafkaFlow || false,
             }
         } else {
-            if (this.ctx.config.tunnel && this.ctx.config.tunnel?.type === 'auto') {
+            if (this.ctx.autoTunnelStarted) {
                 await stopTunnelHelper(this.ctx)
             }
             throw new Error('SmartUI capabilities are missing in env variables or in driver capabilities');

src/lib/uploadAppFigma.ts

Lines changed: 1 addition & 0 deletions
@@ -19,6 +19,7 @@ export default async (ctx: Context): Promise<string> => {
         smartIgnore: ctx.config.smartIgnore,
         git: ctx.git,
         platformType: 'app',
+        markBaseline: ctx.options.markBaseline,
     };

     const responseData = await ctx.client.processWebFigma(requestBody, ctx.log);

src/lib/uploadWebFigma.ts

Lines changed: 1 addition & 0 deletions
@@ -18,6 +18,7 @@ export default async (ctx: Context): Promise<string> => {
         figma: figmaConfig,
         smartIgnore: ctx.config.smartIgnore,
         git: ctx.git,
+        markBaseline: ctx.options.markBaseline,
     };

     const responseData = await ctx.client.processWebFigma(requestBody, ctx.log);
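
Both Figma upload paths now forward the CLI's markBaseline option in the request body, so the backend can record the run as a baseline. A minimal sketch of the payload assembly (buildFigmaRequestBody is a hypothetical helper; the real request bodies carry the additional fields shown in each file above):

// Hypothetical helper illustrating how markBaseline rides along with the payload.
function buildFigmaRequestBody(ctx: any, figmaConfig: unknown) {
    return {
        figma: figmaConfig,
        smartIgnore: ctx.config.smartIgnore,
        git: ctx.git,
        markBaseline: ctx.options.markBaseline, // new: propagate the baseline flag from CLI options
    };
}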

src/lib/utils.ts

Lines changed: 28 additions & 24 deletions
@@ -7,6 +7,7 @@ import fs from 'fs';
 import { globalAgent } from 'http';
 import { promisify } from 'util'
 import { build } from 'tsup';
+const util = require('util'); // Import the util module

 var lambdaTunnel = require('@lambdatest/node-tunnel');
 const sleep = promisify(setTimeout);
@@ -248,9 +249,9 @@ export async function startPolling(ctx: Context, build_id: string, baseline: boo
     try {
         let resp;
         if (build_id) {
-            resp = await ctx.client.getScreenshotData(build_id, baseline, ctx.log, projectToken);
+            resp = await ctx.client.getScreenshotData(build_id, baseline, ctx.log, projectToken, '');
         } else if (ctx.build && ctx.build.id) {
-            resp = await ctx.client.getScreenshotData(ctx.build.id, ctx.build.baseline, ctx.log, '');
+            resp = await ctx.client.getScreenshotData(ctx.build.id, ctx.build.baseline, ctx.log, '', '');
         } else {
             return;
         }
@@ -324,7 +325,7 @@ export async function startPolling(ctx: Context, build_id: string, baseline: boo

 export let pingIntervalId: NodeJS.Timeout | null = null;

-export async function startPingPolling(ctx: Context, event: string): Promise<void> {
+export async function startPingPolling(ctx: Context): Promise<void> {
     try {
         ctx.log.debug('Sending initial ping to server...');
         await ctx.client.ping(ctx.build.id, ctx.log);
@@ -333,12 +334,13 @@ export async function startPingPolling(ctx: Context): Promise<voi
         ctx.log.error(`Error during initial ping: ${error.message}`);
     }

+    let sourceCommand = ctx.sourceCommand? ctx.sourceCommand : '';
     // Start the polling interval
     pingIntervalId = setInterval(async () => {
         try {
-            ctx.log.debug('Sending ping to server...'+ event);
+            ctx.log.debug('Sending ping to server... '+ sourceCommand);
             await ctx.client.ping(ctx.build.id, ctx.log);
-            ctx.log.debug('Ping sent successfully.'+ event);
+            ctx.log.debug('Ping sent successfully. '+ sourceCommand);
         } catch (error: any) {
             ctx.log.error(`Error during ping polling: ${error.message}`);
         }
@@ -406,54 +408,56 @@ export async function startTunnelBinary(ctx: Context) {
     }
 }

-export async function startPollingForTunnel(ctx: Context, build_id: string, baseline: boolean, projectToken: string): Promise<void> {
+export let isTunnelPolling: NodeJS.Timeout | null = null;
+
+export async function startPollingForTunnel(ctx: Context, build_id: string, baseline: boolean, projectToken: string, buildName: string): Promise<void> {
+    if (isTunnelPolling) {
+        ctx.log.debug('Tunnel polling is already active. Skipping for build_id: ' + build_id);
+        return;
+    }
     const intervalId = setInterval(async () => {
         try {
             let resp;
             if (build_id) {
-                resp = await ctx.client.getScreenshotData(build_id, baseline, ctx.log, projectToken);
+                resp = await ctx.client.getScreenshotData(build_id, baseline, ctx.log, projectToken, buildName);
             } else if (ctx.build && ctx.build.id) {
-                resp = await ctx.client.getScreenshotData(ctx.build.id, ctx.build.baseline, ctx.log, '');
+                resp = await ctx.client.getScreenshotData(ctx.build.id, ctx.build.baseline, ctx.log, '', '');
             } else {
+                ctx.log.debug('No build information available for polling tunnel status.');
+                clearInterval(intervalId);
+                await stopTunnelHelper(ctx);
                 return;
             }
-
+            ctx.log.debug(' resp from polling for tunnel status: ' + JSON.stringify(resp));
             if (!resp.build) {
                 ctx.log.info("Error: Build data is null.");
                 clearInterval(intervalId);
-
-                const tunnelRunningStatus = await tunnelInstance.isRunning();
-                ctx.log.debug('Running status of tunnel before stopping ? ' + tunnelRunningStatus);
-
-                const status = await tunnelInstance.stop();
-                ctx.log.debug('Tunnel is Stopped ? ' + status);
-
+                await stopTunnelHelper(ctx);
                 return;
             }

             if (resp.build.build_status_ind === constants.BUILD_COMPLETE || resp.build.build_status_ind === constants.BUILD_ERROR) {
                 clearInterval(intervalId);
-
-                const tunnelRunningStatus = await tunnelInstance.isRunning();
-                ctx.log.debug('Running status of tunnel before stopping ? ' + tunnelRunningStatus);
-
-                const status = await tunnelInstance.stop();
-                ctx.log.debug('Tunnel is Stopped ? ' + status);
+                await stopTunnelHelper(ctx);
                 return;
             }
         } catch (error: any) {
-            if (error.message.includes('ENOTFOUND')) {
+            if (error?.message.includes('ENOTFOUND')) {
                 ctx.log.error('Error: Network error occurred while fetching build status while polling. Please check your connection and try again.');
                 clearInterval(intervalId);
             } else {
-                ctx.log.error(`Error fetching build status while polling: ${error.message}`);
+                // Log the error in a human-readable format
+                ctx.log.debug(util.inspect(error, { showHidden: false, depth: null }));
+                ctx.log.error(`Error fetching build status while polling: ${JSON.stringify(error)}`);
             }
             clearInterval(intervalId);
         }
     }, 5000);
+    isTunnelPolling = intervalId;
 }

 export async function stopTunnelHelper(ctx: Context) {
+    ctx.log.debug('stop-tunnel:: Stopping the tunnel now');
     const tunnelRunningStatus = await tunnelInstance.isRunning();
     ctx.log.debug('stop-tunnel:: Running status of tunnel before stopping ? ' + tunnelRunningStatus);
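
startPollingForTunnel is now guarded by a module-level handle, so the stop route and the per-capability finalization loop cannot spawn parallel pollers for the same tunnel. The guard pattern in isolation, with the parameter list trimmed and the poll body reduced to a placeholder (interval and log text follow the diff):

// Module-level handle: non-null while a tunnel polling loop is active.
export let isTunnelPolling: NodeJS.Timeout | null = null;

export async function startPollingForTunnel(ctx: any, buildId: string): Promise<void> {
    if (isTunnelPolling) {
        ctx.log.debug('Tunnel polling is already active. Skipping for build_id: ' + buildId);
        return;
    }
    const intervalId = setInterval(async () => {
        // Placeholder: fetch build status; once the build completes or errors,
        // call clearInterval(intervalId) and stopTunnelHelper(ctx).
    }, 5000);
    isTunnelPolling = intervalId;
}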

src/tasks/createBuildExec.ts

Lines changed: 6 additions & 11 deletions
@@ -2,7 +2,7 @@ import { ListrTask, ListrRendererFactory } from 'listr2';
 import { Context } from '../types.js'
 import chalk from 'chalk';
 import { updateLogContext } from '../lib/logger.js';
-import { startTunnelBinary, startPollingForTunnel, stopTunnelHelper, startPingPolling } from '../lib/utils.js';
+import { stopTunnelHelper, startPingPolling } from '../lib/utils.js';

 export default (ctx: Context): ListrTask<Context, ListrRendererFactory, ListrRendererFactory> => {
     return {
@@ -42,16 +42,11 @@ export default (ctx: Context): ListrTask<Context, ListrRendererFactory, ListrRen
         } else {
             task.output = chalk.gray(`Empty PROJECT_TOKEN and PROJECT_NAME. Skipping Creation of Build!`)
             task.title = 'Skipped SmartUI build creation'
-            if (ctx.config.tunnel && ctx.config.tunnel?.type === 'auto') {
-                await stopTunnelHelper(ctx)
-            }
         }

-        if (ctx.config.tunnel && ctx.config.tunnel?.type === 'auto') {
-            if (ctx.build && ctx.build.id) {
-                startPollingForTunnel(ctx, '', false, '');
-            } else {
-                startPingPolling(ctx, "tunnel-process");
+        if (ctx.autoTunnelStarted) {
+            if (ctx.build && ctx.build.id && ctx.sourceCommand != "exec-start") {
+                startPingPolling(ctx);
             }
         }

@@ -69,7 +64,7 @@ export default (ctx: Context): ListrTask<Context, ListrRendererFactory, ListrRen
         if (process.env.USE_REMOTE_DISCOVERY === undefined) {
             ctx.env.USE_REMOTE_DISCOVERY = true;
             process.env.USE_REMOTE_DISCOVERY = 'true';
-            task.output += chalk.gray(`\n Using remote discovery by deafult for this build`);
+            task.output += chalk.gray(`\n Using remote discovery by default for this build`);
         }
         ctx.log.debug(`USE_REMOTE_DISCOVERY is set to ${ctx.env.USE_REMOTE_DISCOVERY}`);

@@ -85,7 +80,7 @@ export default (ctx: Context): ListrTask<Context, ListrRendererFactory, ListrRen
         }
     } catch (error: any) {
         ctx.log.debug(error);
-        if (ctx.config.tunnel && ctx.config.tunnel?.type === 'auto') {
+        if (ctx.autoTunnelStarted) {
             await stopTunnelHelper(ctx)
         }
         task.output = chalk.gray(error.message);
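
After build creation, this task now starts ping polling only for auto-tunnel runs that did not originate from exec:start, since the exec:start command handles its own keep-alive after the Listr tasks complete. A reduced sketch of the decision, with ctx fields assumed from the diff:

import { startPingPolling } from '../lib/utils.js';

// Reduced sketch of the keep-alive decision after build creation.
function keepBuildAlive(ctx: any): void {
    if (ctx.autoTunnelStarted && ctx.build?.id && ctx.sourceCommand !== 'exec-start') {
        // Commands other than exec:start (e.g. exec) keep the build alive from here;
        // exec:start starts its own ping or tunnel polling later.
        startPingPolling(ctx);
    }
}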
