Skip to content

Fix OpenAI OAuth gpt-image-2 generation routing #2316

Open
ToxicantX wants to merge 1 commit into Wei-Shaw:main from
ToxicantX:codex/fix-gpt-image-2-oauth-generation
Open

Fix OpenAI OAuth gpt-image-2 generation routing #2316
ToxicantX wants to merge 1 commit into Wei-Shaw:main from
ToxicantX:codex/fix-gpt-image-2-oauth-generation

Conversation

@ToxicantX
Copy link
Copy Markdown

Summary

  • route non-streaming OpenAI OAuth gpt-image-2 image generation through ChatGPT picture_v2 conversation flow
  • map gpt-image-2 to ChatGPT backend slug gpt-5-3
  • keep existing Responses API path for streaming/edit compatibility
  • update image gateway tests for the new OAuth generation flow

Tests

  • docker run --rm -v ${PWD}\backend:/app -w /app golang:1.26.3-alpine sh -c "go test ./internal/service -run OpenAIImages -count=1"

Copilot AI review requested due to automatic review settings May 9, 2026 06:06
@github-actions
Copy link
Copy Markdown
Contributor

github-actions Bot commented May 9, 2026

Thank you for your contribution! Before we can merge this PR, we need you to sign our Contributor License Agreement (CLA).

To sign, please reply with the following comment:

I have read the CLA Document and I hereby sign the CLA

You only need to sign once — it will be valid for all your future contributions to this project.


I have read the CLA Document and I hereby sign the CLA


You can retrigger this bot by commenting recheck in this Pull Request. Posted by the CLA Assistant Lite bot.

Copy link
Copy Markdown
Contributor

Copilot AI left a comment

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Pull request overview

Routes OpenAI OAuth non-streaming gpt-image-2 image generation through the ChatGPT picture_v2 conversation flow (with model slug mapping to gpt-5-3), while preserving the existing Responses API path for streaming/edit scenarios. This updates the gateway’s upstream request sequence and adjusts tests to validate the new OAuth generation routing behavior.

Changes:

  • Added ChatGPT conversation bootstrap/requirements + prepare/start request flow for OAuth non-streaming image generations.
  • Mapped gpt-image-2 to ChatGPT backend model slug gpt-5-3 for the conversation-based generation path.
  • Updated image gateway tests to assert the new multi-request upstream sequence and payload expectations.

Reviewed changes

Copilot reviewed 2 out of 2 changed files in this pull request and generated 7 comments.

File Description
backend/internal/service/openai_images_responses.go Implements ChatGPT picture_v2 conversation routing for OAuth non-streaming gpt-image-2 generations and keeps Responses API for streaming/edits.
backend/internal/service/openai_images_test.go Updates OAuth image forwarding test to validate the ChatGPT conversation prepare/start flow and headers/payloads.

💡 Add Copilot custom instructions for smarter, more guided reviews. Learn how to get started.

Comment on lines +332 to +335
payload, _ = sjson.SetBytes(payload, "parent_message_id", uuid.NewString())
payload, _ = sjson.SetBytes(payload, "model", openAIImagesChatGPTModelSlug(requestModel))
payload, _ = sjson.SetBytes(payload, "partial_query.id", uuid.NewString())
payload, _ = sjson.SetBytes(payload, "partial_query.content.parts.0", prompt)
Comment on lines +443 to +445
func buildOpenAIImagesLegacyRequirementsToken(userAgent string) string {
seed := fmt.Sprintf("%0.16f", rand.Float64())
config := []any{
Comment on lines +500 to +524
func generateOpenAIImagesPOW(seed string, difficulty string, config []any, limit int) (string, bool) {
target, err := hexStringBytes(difficulty)
if err != nil || len(target) == 0 {
return "", false
}
diffLen := len(target)
seedBytes := []byte(seed)
static1 := mustMarshalOpenAIPOWJSONPrefix(config[:3], true)
static2 := mustMarshalOpenAIPOWJSONMiddle(config[4:9])
static3 := mustMarshalOpenAIPOWJSONSuffix(config[10:])
for i := 0; i < limit; i++ {
finalJSON := append([]byte{}, static1...)
finalJSON = append(finalJSON, []byte(fmt.Sprintf("%d", i))...)
finalJSON = append(finalJSON, static2...)
finalJSON = append(finalJSON, []byte(fmt.Sprintf("%d", i>>1))...)
finalJSON = append(finalJSON, static3...)
encoded := []byte(base64.StdEncoding.EncodeToString(finalJSON))
h := sha3.New512()
_, _ = h.Write(seedBytes)
_, _ = h.Write(encoded)
if bytes.Compare(h.Sum(nil)[:diffLen], target) <= 0 {
return string(encoded), true
}
}
return "wQ8Lk5FbGpA2NcR9dShT6gYjU7VxZ4D" + base64.StdEncoding.EncodeToString([]byte(`"`+seed+`"`)), false
Comment on lines +592 to +612
func extractOpenAIImagesPOWResources(html string) ([]string, string) {
matches := regexp.MustCompile(`<script[^>]+src=["']([^"']+)["']`).FindAllStringSubmatch(html, -1)
sources := make([]string, 0, len(matches))
dataBuild := ""
for _, match := range matches {
if len(match) < 2 || strings.TrimSpace(match[1]) == "" {
continue
}
sources = append(sources, strings.TrimSpace(match[1]))
if dataBuild == "" {
if hit := regexp.MustCompile(`c/[^/]*/_`).FindString(match[1]); hit != "" {
dataBuild = hit
}
}
}
if len(sources) == 0 {
sources = []string{openAIChatGPTDefaultPOWScript}
}
if dataBuild == "" {
if match := regexp.MustCompile(`<html[^>]*data-build=["']([^"']*)["']`).FindStringSubmatch(html); len(match) > 1 {
dataBuild = match[1]
Comment on lines +637 to +649
bootstrapResp, err := s.doOpenAIImagesChatGPTRequest(ctx, account, http.MethodGet, openAIChatGPTStartURL, "/", nil, token, http.Header{
"Accept": {"text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8"},
})
if err != nil {
return openAIChatRequirements{}, err
}
bootstrapBody, _ := io.ReadAll(io.LimitReader(bootstrapResp.Body, 2<<20))
_ = bootstrapResp.Body.Close()
if bootstrapResp.StatusCode >= 400 {
return openAIChatRequirements{}, fmt.Errorf("chatgpt bootstrap failed: status %d", bootstrapResp.StatusCode)
}
_, _ = extractOpenAIImagesPOWResources(string(bootstrapBody))

Comment on lines +1719 to 1724
if parsed.Stream || parsed.IsEdits() {
return s.forwardOpenAIImagesOAuthResponses(ctx, c, account, parsed, requestModel, startTime)
}

upstreamCtx, releaseUpstreamCtx := detachStreamUpstreamContext(ctx, parsed.Stream)
defer releaseUpstreamCtx()
seed := fmt.Sprintf("%0.16f", rand.Float64())
config := []any{
3000,
time.Now().UTC().Format("Mon Jan 02 2006 15:04:05") + " GMT-0500 (Eastern Standard Time)",
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment

Labels

None yet

Projects

None yet

Development

Successfully merging this pull request may close these issues.

3 participants