Potential deadlock detected: workflow goroutine 'root' didn't yield for over a second

Hi Team, we’re using the Go SDK for our workers and are encountering the following issue:
“Potential deadlock detected: workflow goroutine ‘root’ didn’t yield for over a second.”
This is happening quite frequently in production but not during testing or debugging.

We’ve confirmed that our workflow code does not contain any blocking operations. Based on similar topics from other users, we also checked for high CPU utilization and CPU throttling, and we haven’t observed either.
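One thing we’re unsure about is whether temporarily widening the deadlock detection window would at least tell us if this is a genuine block or just something slow. The sketch below is roughly what we had in mind, assuming the standard go.temporal.io/sdk/client and go.temporal.io/sdk/worker packages (client construction and registrations are trimmed, and the five-second value is arbitrary):

package main

import (
	"log"
	"time"

	"go.temporal.io/sdk/client"
	"go.temporal.io/sdk/worker"
)

func main() {
	c, err := client.Dial(client.Options{})
	if err != nil {
		log.Fatalln("unable to create Temporal client", err)
	}
	defer c.Close()

	w := worker.New(c, "google-sync-queue.v1", worker.Options{
		// Default is one second; widening it here would be purely diagnostic.
		DeadlockDetectionTimeout: 5 * time.Second,
	})

	// Workflow and activity registrations omitted for brevity.

	if err := w.Run(worker.InterruptCh()); err != nil {
		log.Fatalln("worker stopped", err)
	}
}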

Below, I’ve included our workflow code and the stack trace from the panic. We hope to gain some insights or suggestions to resolve this issue. Your help is greatly appreciated!

Workflow code
func SyncGoogleSKU(ctx workflow.Context, req workflowDef.SyncGoogleSKURequest) (*workflowDef.SyncGoogleSKUResponse, error) {
	var (
		res           *activityDef.SyncGoogleSKUResponse
		signalReq     signal.SyncGoogleSKURequest
		continueAsNew bool
		err           error
	)

	// Drain any signals that were buffered before this run started.
	signalChan := workflow.GetSignalChannel(ctx, signal.SyncGoogleSKU)
	for signalChan.ReceiveAsync(&signalReq) {
	}

	// Run the initial sync activity.
	if err := workflow.ExecuteActivity(workflow.WithActivityOptions(ctx, defaultSyncActivityOpts), activityDef.TypeSyncGoogleSKU,
		activityDef.SyncGoogleSKURequest{
			BizName:   req.BizName,
			ProductID: req.ProductID,
			AppID:     req.AppID,
		}).Get(ctx, &res); err != nil {
		return nil, err
	}

	// Start a 3-hour timer that a later signal can cancel.
	ctxCancel, cancel := workflow.WithCancel(ctx)
	timer := workflow.NewTimer(ctxCancel, 3*time.Hour)

	selector := workflow.NewSelector(ctx)

	// Timer branch: when the timer fires, run the base-plan migration activity.
	selector.AddFuture(timer, func(_ workflow.Future) {
		var res *activityDef.MigrateBasePlanResponse
		activityOps := defaultSyncActivityOpts
		activityOps.StartToCloseTimeout = time.Minute
		if activityErr := workflow.ExecuteActivity(workflow.WithActivityOptions(ctx, activityOps), activityDef.TypeMigrateGoogleBasePlan,
			activityDef.MigrateBasePlanRequest{
				BizName:   req.BizName,
				ProductID: req.ProductID,
				AppID:     req.AppID,
			},
		).Get(ctx, &res); activityErr != nil {
			err = activityErr
		}
	})

	// Signal branch: a new sync request cancels the timer and triggers continue-as-new.
	selector.AddReceive(signalChan, func(ch workflow.ReceiveChannel, _ bool) {
		ch.Receive(ctx, nil)

		cancel()
		continueAsNew = true
	})

	selector.Select(ctx)

	// Drain any signals that arrived while we were waiting so they are not lost.
	for signalChan.ReceiveAsync(&signalReq) {
		continueAsNew = true
	}
	if continueAsNew {
		return nil, workflow.NewContinueAsNewError(ctx, workflowDef.TypeSyncGoogleSKU, req)
	}

	if err != nil {
		return nil, err
	}

	return &workflowDef.SyncGoogleSKUResponse{}, nil
}
Stack trace

I’ve highlighted the workflow code lines in the stack trace:
line 61: selector.Select(ctx)
line 43: if activityErr := workflow.ExecuteActivity(workflow.WithActivityOptions(ctx, activityOps), activityDef.TypeMigrateGoogleBasePlan,

Workflow panic%!(EXTRA string=Namespace, string=catalog_channel_sync, string=TaskQueue, string=google-sync-queue.v1, string=WorkerID, string=53@dp-1f18d8838c-8485c7f9cc-c6vkh@, string=WorkflowType, string=workflow.sync.google_sku, string=WorkflowID, string=SUBSCRIPTION_GP_7307106886602935041_1180, string=RunID, string=c1bbc74c-de6a-4f69-a331-5699c7c4f3dd, string=Attempt, int32=1, string=Error, *internal.workflowPanicError=[TMPRL1101] Potential deadlock detected: workflow goroutine "root" didn't yield for over a second, string=StackTrace, string=coroutine root [running]:
internal/poll.runtime_Semacquire(0x490dd0?)
	/usr/local/go/src/runtime/sema.go:67 +0x25
internal/poll.(*fdMutex).rwlock(0xc0000a8c60, 0xa9?)
	/usr/local/go/src/internal/poll/fd_mutex.go:154 +0xc5
internal/poll.(*FD).writeLock(...)
	/usr/local/go/src/internal/poll/fd_mutex.go:239
internal/poll.(*FD).Write(0xc0000a8c60, {0xc0128c0800, 0x2e5, 0x400})
	/usr/local/go/src/internal/poll/fd_unix.go:367 +0x65
os.(*File).write(...)
	/usr/local/go/src/os/file_posix.go:46
os.(*File).Write(0xc00006e048, {0xc0128c0800?, 0x2e5, 0x5?})
	/usr/local/go/src/os/file.go:189 +0x51
/gopkg/logs/v2/writer.(*ConsoleWriter).Write(0xc00393e6d8, {0x42061c0, 0xc0082bfa20})
	/opt/tiger/compile_path/pkg/mod/gopkg/logs/v2@v2.1.51/writer/console.go:50 +0x412
/gopkg/logs/v2.(*Log).Emit(0xc0082bfa20)
	/opt/tiger/compile_path/pkg/mod/gopkg/logs/v2@v2.1.51/log.go:638 +0xf07
/lib/common/pkg/log.logStr({0x41f4250, 0xc00f628300}, 0xc002de0d80?, {0x0?, 0xc001f6e100?, 0xc00cb99688?}, {0xc00546e3c0, 0x1d8})
	/opt/tiger/compile_path/pkg/mod//lib/common@v0.3.20/pkg/log/common.go:171 +0x25d
/lib/common/pkg/log.(*Logger).Debugf(0xc00393e738, {0x3c6d2d7?, 0x41609a?}, {0xc002de0d80?, 0xc004b78c78?, 0xc?})
	/opt/tiger/compile_path/pkg/mod//lib/common@v0.3.20/pkg/log/common.go:114 +0x96
/lib/common/pkg/client/workflow.(*temporalLogger).Debug(0xc00b66ae40?, {0x3c6d2d7?, 0x7f658f44c2b8?}, {0xc002de0d80?, 0x35883a0?, 0x80?})
	/opt/tiger/compile_path/pkg/mod//lib/common@v0.3.20/pkg/client/workflow/logger.go:16 +0x25
temporal/sdk/log.(*withLogger).Debug(0xc00b6bcf90, {0x3c6d2d7, 0xf}, {0xc001f6e100?, 0xc?, 0x41535b?})
	/opt/tiger/compile_path/pkg/mod/temporal/sdk@v1.29.1/log/with_logger.go:64 +0xe2
temporal/sdk/log.(*withLogger).Debug(0xc0125e7a70, {0x3c6d2d7, 0xf}, {0xc0178f5e40?, 0x4?, 0xc012177c80?})
	/opt/tiger/compile_path/pkg/mod/temporal/sdk@v1.29.1/log/with_logger.go:64 +0xe2
temporal/sdk/internal/log.(*ReplayLogger).Debug(0x3c9eb51?, {0x3c6d2d7?, 0xc0101832c0?}, {0xc0178f5e40?, 0x1?, 0x7f658fdcc3e8?})
	/opt/tiger/compile_path/pkg/mod/temporal/sdk@v1.29.1/internal/log/replay_logger.go:58 +0x3c
temporal/sdk/internal.(*workflowEnvironmentImpl).ExecuteActivity(0xc0082d84e0, {{{0x0, 0x0}, {0xc00a04da10, 0x14}, 0x0, 0x0, 0xdf8475800, 0x0, 0x0, ...}, ...}, ...)
	/opt/tiger/compile_path/pkg/mod/temporal/sdk@v1.29.1/internal/internal_event_handlers.go:769 +0x7b7
temporal/sdk/internal.(*workflowEnvironmentInterceptor).ExecuteActivity(0xc008df49b0, {0x41f4740, 0xc012443620}, {0x3c9eb51, 0x1d}, {0xc012b785a0, 0x1, 0x1})
	/opt/tiger/compile_path/pkg/mod/temporal/sdk@v1.29.1/internal/workflow.go:792 +0x704
temporal/sdk/internal.ExecuteActivity({0x41f4740, 0xc012443590}, {0x34b0ee0, 0x41be350}, {0xc012b785a0, 0x1, 0x1})
	/opt/tiger/compile_path/pkg/mod/temporal/sdk@v1.29.1/internal/workflow.go:736 +0x1c3
temporal/sdk/workflow.ExecuteActivity(...)
	/opt/tiger/compile_path/pkg/mod/temporal/sdk@v1.29.1/workflow/workflow.go:189
/lib/catalog_management/internal/workflow.SyncGoogleSKU.func1({0x28eb2bd?, 0xc007b1e770?})
	**/opt/tiger/compile_path/src//lib/catalog_management/internal/workflow/sync.go:43 +0x1bf**
**temporal/sdk/internal.(*selectorImpl).Select.func8.1()**
	/opt/tiger/compile_path/pkg/mod/temporal/sdk@v1.29.1/internal/internal_workflow.go:1477 +0x62
temporal/sdk/internal.(*selectorImpl).Select(0xc006bf3020, {0x41f4740, 0xc009c0c9c0})
	/opt/tiger/compile_path/pkg/mod/temporal/sdk@v1.29.1/internal/internal_workflow.go:1507 +0x959
/lib/catalog_management/internal/workflow.SyncGoogleSKU({0x41f4740, 0xc009c0c9c0}, {{0xc0056fef80, 0xc}, {0xc00a04daa0, 0x13}, {0xc0056fef78, 0x4}})
	**/opt/tiger/compile_path/src//lib/catalog_management/internal/workflow/sync.go:61 +0x493**
reflect.Value.call({0x35d8180?, 0x3d48818?, 0x30?}, {0x3c3c5a7, 0x4}, {0xc009c0c9f0, 0x2, 0xc00cb99688?})
	/usr/local/go/src/reflect/value.go:596 +0xca6
reflect.Value.Call({0x35d8180?, 0x3d48818?, 0x0?}, {0xc009c0c9f0?, 0xc0053f1880?, 0xc004b79d30?})
	/usr/local/go/src/reflect/value.go:380 +0xb9
temporal/sdk/internal.executeFunction({0x35d8180, 0x3d48818}, {0xc0053f1880, 0x2, 0x35883a0?})
	/opt/tiger/compile_path/pkg/mod/temporal/sdk@v1.29.1/internal/internal_worker.go:1984 +0x26b
temporal/sdk/internal.(*workflowEnvironmentInterceptor).ExecuteWorkflow(0xc008df49b0, {0x41f54a0, 0xc0055f77a0}, 0xc010685110)
	/opt/tiger/compile_path/pkg/mod/temporal/sdk@v1.29.1/internal/workflow.go:691 +0x150
temporal/sdk/internal.(*workflowExecutor).Execute(0xc013e2ca00, {0x41f54a0, 0xc0055f77a0}, 0xc013e2c280)
	/opt/tiger/compile_path/pkg/mod/temporal/sdk@v1.29.1/internal/internal_worker.go:856 +0x291
temporal/sdk/internal.(*syncWorkflowDefinition).Execute.func1({0x41f4740, 0xc009c0c360})
	/opt/tiger/compile_path/pkg/mod/temporal/sdk@v1.29.1/internal/internal_workflow.go:571 +0xc6)

Thank you in advance for your time and assistance.

Looking at the stack trace, my best guess would be to look at the workflow logger implementation you are setting up. I’ve seen this before with other users, where the logger implementation (a distributed one in most of the cases I’ve seen, at least) can start blocking and cause the deadlock detector to trigger.
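Your trace supports that: the workflow goroutine is stuck inside ConsoleWriter.Write on an os.File write, reached from the temporalLogger.Debug call the SDK makes while scheduling the activity. If the logger is the culprit, one option is to make workflow-path logging non-blocking. Below is a minimal sketch of that idea, assuming the standard go.temporal.io/sdk/log interface; AsyncLogger and its buffer size are illustrative, not an SDK API. Records are handed to a background goroutine through a bounded channel and are dropped, rather than blocking a workflow goroutine, when the buffer is full.

// A minimal sketch of a non-blocking logger wrapper (assuming the standard
// go.temporal.io/sdk/log interface; AsyncLogger and the buffer size are
// illustrative). Workflow goroutines only enqueue records; a single
// background goroutine performs the potentially slow writes.
package logging

import "go.temporal.io/sdk/log"

type logRecord struct {
	level   string
	msg     string
	keyvals []interface{}
}

// AsyncLogger wraps a log.Logger whose writes may block (e.g. console or
// network writers) so that callers never wait on I/O.
type AsyncLogger struct {
	inner log.Logger
	ch    chan logRecord
}

func NewAsyncLogger(inner log.Logger, buffer int) *AsyncLogger {
	l := &AsyncLogger{inner: inner, ch: make(chan logRecord, buffer)}
	go l.run()
	return l
}

// run drains the channel and forwards records to the wrapped logger.
func (l *AsyncLogger) run() {
	for r := range l.ch {
		switch r.level {
		case "debug":
			l.inner.Debug(r.msg, r.keyvals...)
		case "info":
			l.inner.Info(r.msg, r.keyvals...)
		case "warn":
			l.inner.Warn(r.msg, r.keyvals...)
		default:
			l.inner.Error(r.msg, r.keyvals...)
		}
	}
}

// enqueue never blocks: when the buffer is full the record is dropped,
// which is preferable to stalling a workflow goroutine for over a second.
func (l *AsyncLogger) enqueue(level, msg string, keyvals []interface{}) {
	select {
	case l.ch <- logRecord{level: level, msg: msg, keyvals: keyvals}:
	default:
	}
}

func (l *AsyncLogger) Debug(msg string, keyvals ...interface{}) { l.enqueue("debug", msg, keyvals) }
func (l *AsyncLogger) Info(msg string, keyvals ...interface{})  { l.enqueue("info", msg, keyvals) }
func (l *AsyncLogger) Warn(msg string, keyvals ...interface{})  { l.enqueue("warn", msg, keyvals) }
func (l *AsyncLogger) Error(msg string, keyvals ...interface{}) { l.enqueue("error", msg, keyvals) }

You would pass the wrapper via client.Options.Logger (or wrap only the logger your workers use). Widening worker.Options.DeadlockDetectionTimeout can also be a useful diagnostic, but it only hides slow writes; the blocking console write is what needs fixing.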