Fix OpenAI OAuth gpt-image-2 generation routing#2316
Fix OpenAI OAuth gpt-image-2 generation routing #2316 — ToxicantX wants to merge 1 commit into Wei-Shaw:main from
Conversation
|
Thank you for your contribution! Before we can merge this PR, we need you to sign our Contributor License Agreement (CLA). To sign, please reply with the following comment:
You only need to sign once — it will be valid for all your future contributions to this project. To sign, reply with the comment: "I have read the CLA Document and I hereby sign the CLA." You can retrigger this bot by commenting `recheck` in this Pull Request. Posted by the CLA Assistant Lite bot. |
There was a problem hiding this comment.
Pull request overview
Routes OpenAI OAuth non-streaming gpt-image-2 image generation through the ChatGPT picture_v2 conversation flow (with model slug mapping to gpt-5-3), while preserving the existing Responses API path for streaming/edit scenarios. This updates the gateway’s upstream request sequence and adjusts tests to validate the new OAuth generation routing behavior.
Changes:
- Added ChatGPT conversation bootstrap/requirements + prepare/start request flow for OAuth non-streaming image generations.
- Mapped `gpt-image-2` to ChatGPT backend model slug `gpt-5-3` for the conversation-based generation path.
- Updated image gateway tests to assert the new multi-request upstream sequence and payload expectations.
Reviewed changes
Copilot reviewed 2 out of 2 changed files in this pull request and generated 7 comments.
| File | Description |
|---|---|
backend/internal/service/openai_images_responses.go |
Implements ChatGPT picture_v2 conversation routing for OAuth non-streaming gpt-image-2 generations and keeps Responses API for streaming/edits. |
backend/internal/service/openai_images_test.go |
Updates OAuth image forwarding test to validate the ChatGPT conversation prepare/start flow and headers/payloads. |
💡 Add Copilot custom instructions for smarter, more guided reviews. Learn how to get started.
| payload, _ = sjson.SetBytes(payload, "parent_message_id", uuid.NewString()) | ||
| payload, _ = sjson.SetBytes(payload, "model", openAIImagesChatGPTModelSlug(requestModel)) | ||
| payload, _ = sjson.SetBytes(payload, "partial_query.id", uuid.NewString()) | ||
| payload, _ = sjson.SetBytes(payload, "partial_query.content.parts.0", prompt) |
| func buildOpenAIImagesLegacyRequirementsToken(userAgent string) string { | ||
| seed := fmt.Sprintf("%0.16f", rand.Float64()) | ||
| config := []any{ |
| func generateOpenAIImagesPOW(seed string, difficulty string, config []any, limit int) (string, bool) { | ||
| target, err := hexStringBytes(difficulty) | ||
| if err != nil || len(target) == 0 { | ||
| return "", false | ||
| } | ||
| diffLen := len(target) | ||
| seedBytes := []byte(seed) | ||
| static1 := mustMarshalOpenAIPOWJSONPrefix(config[:3], true) | ||
| static2 := mustMarshalOpenAIPOWJSONMiddle(config[4:9]) | ||
| static3 := mustMarshalOpenAIPOWJSONSuffix(config[10:]) | ||
| for i := 0; i < limit; i++ { | ||
| finalJSON := append([]byte{}, static1...) | ||
| finalJSON = append(finalJSON, []byte(fmt.Sprintf("%d", i))...) | ||
| finalJSON = append(finalJSON, static2...) | ||
| finalJSON = append(finalJSON, []byte(fmt.Sprintf("%d", i>>1))...) | ||
| finalJSON = append(finalJSON, static3...) | ||
| encoded := []byte(base64.StdEncoding.EncodeToString(finalJSON)) | ||
| h := sha3.New512() | ||
| _, _ = h.Write(seedBytes) | ||
| _, _ = h.Write(encoded) | ||
| if bytes.Compare(h.Sum(nil)[:diffLen], target) <= 0 { | ||
| return string(encoded), true | ||
| } | ||
| } | ||
| return "wQ8Lk5FbGpA2NcR9dShT6gYjU7VxZ4D" + base64.StdEncoding.EncodeToString([]byte(`"`+seed+`"`)), false |
| func extractOpenAIImagesPOWResources(html string) ([]string, string) { | ||
| matches := regexp.MustCompile(`<script[^>]+src=["']([^"']+)["']`).FindAllStringSubmatch(html, -1) | ||
| sources := make([]string, 0, len(matches)) | ||
| dataBuild := "" | ||
| for _, match := range matches { | ||
| if len(match) < 2 || strings.TrimSpace(match[1]) == "" { | ||
| continue | ||
| } | ||
| sources = append(sources, strings.TrimSpace(match[1])) | ||
| if dataBuild == "" { | ||
| if hit := regexp.MustCompile(`c/[^/]*/_`).FindString(match[1]); hit != "" { | ||
| dataBuild = hit | ||
| } | ||
| } | ||
| } | ||
| if len(sources) == 0 { | ||
| sources = []string{openAIChatGPTDefaultPOWScript} | ||
| } | ||
| if dataBuild == "" { | ||
| if match := regexp.MustCompile(`<html[^>]*data-build=["']([^"']*)["']`).FindStringSubmatch(html); len(match) > 1 { | ||
| dataBuild = match[1] |
| bootstrapResp, err := s.doOpenAIImagesChatGPTRequest(ctx, account, http.MethodGet, openAIChatGPTStartURL, "/", nil, token, http.Header{ | ||
| "Accept": {"text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8"}, | ||
| }) | ||
| if err != nil { | ||
| return openAIChatRequirements{}, err | ||
| } | ||
| bootstrapBody, _ := io.ReadAll(io.LimitReader(bootstrapResp.Body, 2<<20)) | ||
| _ = bootstrapResp.Body.Close() | ||
| if bootstrapResp.StatusCode >= 400 { | ||
| return openAIChatRequirements{}, fmt.Errorf("chatgpt bootstrap failed: status %d", bootstrapResp.StatusCode) | ||
| } | ||
| _, _ = extractOpenAIImagesPOWResources(string(bootstrapBody)) | ||
|
|
| if parsed.Stream || parsed.IsEdits() { | ||
| return s.forwardOpenAIImagesOAuthResponses(ctx, c, account, parsed, requestModel, startTime) | ||
| } | ||
|
|
||
| upstreamCtx, releaseUpstreamCtx := detachStreamUpstreamContext(ctx, parsed.Stream) | ||
| defer releaseUpstreamCtx() |
| seed := fmt.Sprintf("%0.16f", rand.Float64()) | ||
| config := []any{ | ||
| 3000, | ||
| time.Now().UTC().Format("Mon Jan 02 2006 15:04:05") + " GMT-0500 (Eastern Standard Time)", |
Summary
Tests