
Commit 68478fd

Added ToYaml.com/GET API support ↞ [auto-sync from https://github.com/adamlui/ai-web-extensions/tree/main/amazongpt]
1 parent 6999617 commit 68478fd

1 file changed

chatgpt/amazongpt/amazongpt.user.js

Lines changed: 42 additions & 24 deletions
@@ -3,7 +3,7 @@
 // @description Adds the magic of AI to Amazon shopping
 // @author KudoAI
 // @namespace https://kudoai.com
-// @version 2025.1.14.5
+// @version 2025.1.14.6
 // @license MIT
 // @icon https://amazongpt.kudoai.com/assets/images/icons/amazongpt/black-gold-teal/icon48.png?v=0fddfc7
 // @icon64 https://amazongpt.kudoai.com/assets/images/icons/amazongpt/black-gold-teal/icon64.png?v=0fddfc7
@@ -56,6 +56,7 @@
 // @connect chatgpt.com
 // @connect update.greasyfork.org
 // @connect fanyi.sogou.com
+// @connect toyaml.com
 // @require https://cdn.jsdelivr.net/npm/@kudoai/chatgpt.js@3.5.0/dist/chatgpt.min.js#sha256-+C0x4BOFQc38aZB3pvUC2THu+ZSvuCxRphGdtRLjCDg=
 // @require https://cdnjs.cloudflare.com/ajax/libs/crypto-js/4.2.0/crypto-js.min.js#sha256-dppVXeVTurw1ozOPNE3XqhYmDJPOosfbKQcHyQSE58w=
 // @require https://cdn.jsdelivr.net/npm/generate-ip@2.4.4/dist/generate-ip.min.js#sha256-aQQKAQcMgCu8IpJp9HKs387x0uYxngO+Fb4pc5nSF4I=
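The new `@connect toyaml.com` grant is what lets the script's privileged cross-origin requests reach the new host: userscript managers such as Tampermonkey and Violentmonkey only allow `GM.xmlHttpRequest` calls to domains whitelisted under `@connect`. A minimal standalone sketch of such a call (illustrative only; the script itself routes requests through its own `xhr` wrapper and header builder shown further down):

```js
// Minimal sketch: a GM.xmlHttpRequest to the newly whitelisted host. Without the
// matching '@connect toyaml.com' metadata line, the userscript manager would block
// this cross-origin call (or prompt the user). The ?q= param and x-requested-with
// header mirror the GET handling added later in this diff.
GM.xmlHttpRequest({
    method: 'GET',
    url: 'https://toyaml.com/streams?q=' + encodeURIComponent('hello'),
    headers: { 'x-requested-with': 'XMLHttpRequest' },
    onload: resp => console.log(resp.responseText)
})
```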
@@ -429,6 +430,11 @@
         },
         expectedOrigin: { url: 'https://chatgpt.com', headers: { 'Priority': 'u=4' }},
         method: 'POST', streamable: true
+    },
+    'ToYaml.com': {
+        endpoint: 'https://toyaml.com/streams',
+        expectedOrigin: { url: 'https://toyaml.com/chat.html', headers: { 'x-requested-with': 'XMLHttpRequest' }},
+        method: 'GET', streamable: true, watermark: '【本答案来自 toyaml.com】'
     }
 }
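The new `apis['ToYaml.com']` entry follows the same shape as the existing providers: an `endpoint`, an `expectedOrigin` whose URL and headers get spoofed onto outgoing requests, and `method`/`streamable` flags, plus one new field, `watermark`, an attribution string ('本答案来自 toyaml.com', i.e. "this answer comes from toyaml.com") that shows up in the provider's replies. The response handlers later in this diff use that field both to detect end-of-answer and to scrub it from the displayed text; a minimal standalone sketch of that idea:

```js
// Minimal sketch of the watermark field's purpose (string copied from the entry above;
// the real detection/stripping happens in the dataProcess hunks further down).
// The sample reply text is invented for illustration.
const watermark = '【本答案来自 toyaml.com】' // i.e. 'this answer comes from toyaml.com'
const reply = 'Cats make great low-maintenance pets.\n【本答案来自 toyaml.com】'
console.log(reply.includes(watermark))           // true -> streaming treats the answer as complete
console.log(reply.replace(watermark, '').trim()) // watermark removed before the reply is shown
```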

@@ -2502,14 +2508,18 @@
         const ip = ipv4.generate({ verbose: false })
         const headers = {
             'Accept': '*/*', 'Accept-Encoding': 'gzip, deflate, br, zstd',
-            'Connection': 'keep-alive', 'Content-Type': 'application/json', 'DNT': '1',
-            'Host': new URL(apis[api].endpoints?.completions || apis[api].endpoint).hostname,
-            'Origin': apis[api].expectedOrigin.url, 'Sec-Fetch-Site': 'same-origin',
-            'Sec-Fetch-Dest': 'empty', 'Sec-Fetch-Mode': 'cors', 'X-Forwarded-For': ip, 'X-Real-IP': ip
+            'Connection': 'keep-alive', 'DNT': '1',
+            'Origin': apis[api].expectedOrigin.url, 'X-Forwarded-For': ip, 'X-Real-IP': ip
         }
         headers.Referer = headers.Origin + '/'
-        if (api == 'OpenAI') headers.Authorization = 'Bearer ' + config.openAIkey
-        Object.assign(headers, apis[api].expectedOrigin.headers)
+        if (apis[api].method == 'POST') Object.assign(headers, {
+            'Content-Type': 'application/json',
+            'Host': new URL(apis[api].endpoints?.completions || apis[api].endpoint).hostname,
+            'Sec-Fetch-Site': 'same-origin', 'Sec-Fetch-Dest': 'empty', 'Sec-Fetch-Mode': 'cors'
+        })
+        else if (apis[api].method == 'GET') headers['x-requested-with'] = 'XMLHttpRequest'
+        Object.assign(headers, apis[api].expectedOrigin.headers) // API-specific ones
+        if (api == 'OpenAI') headers.Authorization = `Bearer ${config.openAIkey}`
         return headers
     },
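With this refactor, `createHeaders()` keeps only the method-agnostic headers in the base object and then branches on `apis[api].method`: POST APIs get the JSON `Content-Type`, explicit `Host`, and the `Sec-Fetch-*` trio as before, while GET APIs (currently just ToYaml.com) get `x-requested-with: XMLHttpRequest`; the API-specific `expectedOrigin.headers` and the OpenAI bearer token are merged last so they can override the defaults. A trimmed, self-contained sketch of that branching (hypothetical helper name, simplified endpoint lookup; not the script's actual function):

```js
// Hypothetical, trimmed-down re-statement of the branching above, just to show
// which headers end up on GET vs POST requests. 'entry' stands in for apis[api].
function sketchHeaders(entry, ip = '203.0.113.7') {
    const headers = {
        'Accept': '*/*', 'Accept-Encoding': 'gzip, deflate, br, zstd',
        'Connection': 'keep-alive', 'DNT': '1',
        'Origin': entry.expectedOrigin.url, 'X-Forwarded-For': ip, 'X-Real-IP': ip
    }
    headers.Referer = headers.Origin + '/'
    if (entry.method == 'POST') Object.assign(headers, {
        'Content-Type': 'application/json',
        'Host': new URL(entry.endpoint).hostname,
        'Sec-Fetch-Site': 'same-origin', 'Sec-Fetch-Dest': 'empty', 'Sec-Fetch-Mode': 'cors'
    })
    else if (entry.method == 'GET') headers['x-requested-with'] = 'XMLHttpRequest'
    return Object.assign(headers, entry.expectedOrigin.headers) // API-specific overrides last
}

// e.g. the new GET provider from this diff:
console.log(sketchHeaders({
    endpoint: 'https://toyaml.com/streams', method: 'GET',
    expectedOrigin: { url: 'https://toyaml.com/chat.html', headers: { 'x-requested-with': 'XMLHttpRequest' }}
}))
```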

@@ -2632,19 +2642,22 @@
             }

             // Get/show answer from AI
-            xhr({
-                method: apis[get.reply.api].method,
-                url: apis[get.reply.api].endpoints?.completions || apis[get.reply.api].endpoint,
+            const reqMethod = apis[get.reply.api].method
+            const xhrConfig = {
+                headers: api.createHeaders(get.reply.api), method: reqMethod,
                 responseType: config.streamingDisabled || !config.proxyAPIenabled ? 'text' : 'stream',
-                headers: api.createHeaders(get.reply.api), data: await api.createPayload(get.reply.api, msgChain),
-                onload: resp => dataProcess.text(get.reply, resp),
-                onloadstart: resp => dataProcess.stream(get.reply, resp),
                 onerror: err => { log.error(err)
                     if (!config.proxyAPIenabled)
                         appAlert(!config.openAIkey ? 'login' : ['openAInotWorking', 'suggestProxy'])
                     else api.tryNew(get.reply)
-                }
-            })
+                },
+                onload: resp => dataProcess.text(get.reply, resp),
+                onloadstart: resp => dataProcess.stream(get.reply, resp),
+                url: ( apis[get.reply.api].endpoints?.completions || apis[get.reply.api].endpoint )
+                   + ( reqMethod == 'GET' ? `?q=${encodeURIComponent(msgChain[msgChain.length -1].content)}` : '' )
+            }
+            if (reqMethod == 'POST') xhrConfig.data = await api.createPayload(get.reply.api, msgChain)
+            xhr(xhrConfig)
         }
     }
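The request builder now assembles an `xhrConfig` object first and only then dispatches it: for GET providers, the last message in `msgChain` is URL-encoded into a `?q=` query string appended to the endpoint and no request body is set, while POST providers keep a bare URL and get their JSON payload attached via `createPayload()` as before. Note that only the newest message is sent this way, so a GET provider receives none of the earlier conversation context. A small standalone sketch of the URL assembly, using the ToYaml.com endpoint from this diff and an invented message chain:

```js
// Standalone sketch of the GET URL construction above (message contents are invented)
const endpoint = 'https://toyaml.com/streams'
const msgChain = [
    { role: 'user', content: 'best budget headphones?' },
    { role: 'assistant', content: '...' },
    { role: 'user', content: 'any under $50 with a mic?' }
]
const url = endpoint + `?q=${encodeURIComponent(msgChain[msgChain.length - 1].content)}`
console.log(url) // https://toyaml.com/streams?q=any%20under%20%2450%20with%20a%20mic%3F
```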

@@ -2666,15 +2679,9 @@
             reader.read().then(processStreamText).catch(err => log.error('Error processing stream', err.message))

             function processStreamText({ done, value }) {
-                if (done) { caller.sender = null
-                    if (appDiv.querySelector('.loading')) // no text shown
-                        api.tryNew(caller)
-                    else { // text was shown
-                        caller.status = 'done' ; caller.attemptCnt = null
-                        show.replyCornerBtns() ; api.clearTimedOut(caller.triedAPIs)
-                    } return
-                }
+                if (done) { handleProcessCompletion() ; return }
                 let chunk = new TextDecoder('utf8').decode(new Uint8Array(value))
+                if (chunk.includes(apis[caller.api].watermark)) { handleProcessCompletion() ; return }
                 if (caller.api == 'MixerBox AI') { // pre-process chunks
                     const extractedChunks = Array.from(chunk.matchAll(/data:(.*)/g), match => match[1]
                         .replace(/\[SPACE\]/g, ' ').replace(/\[NEWLINE\]/g, '\n'))
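In the streaming path, each chunk is decoded from the raw `Uint8Array` with `TextDecoder`, and if a chunk contains the provider's watermark the answer is treated as finished: the new `handleProcessCompletion()` helper (extracted from the old `done` branch, see the next hunk) runs and reading stops early. A minimal standalone sketch of that read loop, assuming a generic `ReadableStream` reader and stubbed callbacks (not the script's code):

```js
// Minimal sketch of the early-exit-on-watermark read loop (hypothetical helper;
// onChunk/onDone are stand-ins for the script's chunk display and completion handling)
async function readStream(reader, watermark, onChunk, onDone) {
    const decoder = new TextDecoder('utf8')
    while (true) {
        const { done, value } = await reader.read()
        if (done) { onDone() ; return }                                    // stream ended normally
        const chunk = decoder.decode(new Uint8Array(value))
        if (watermark && chunk.includes(watermark)) { onDone() ; return }  // provider's end-of-answer marker
        onChunk(chunk)
    }
}
```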
@@ -2709,6 +2716,16 @@
                     processStreamText({ done, value })
                 }).catch(err => log.error('Error reading stream', err.message))
             }
+
+            function handleProcessCompletion() {
+                caller.sender = null
+                if (appDiv.querySelector('.loading')) // no text shown
+                    api.tryNew(caller)
+                else { // text was shown
+                    caller.status = 'done' ; caller.attemptCnt = null
+                    show.replyCornerBtns() ; api.clearTimedOut(caller.triedAPIs)
+                } return
+            }
         },

         text(caller, resp) {
@@ -2739,7 +2756,7 @@
                     } catch (err) { handleProcessError(err) }
                 }
             } else if (resp.responseText) {
-                if (/AIchatOS|FREEGPT/.test(caller.api)) {
+                if (/AIchatOS|ToYaml.com|FREEGPT/.test(caller.api)) {
                     try { // to show response
                         const text = resp.responseText, chunkSize = 1024
                         let currentIdx = 0
@@ -2776,6 +2793,7 @@
                 api.tryNew(caller)
             } else {
                 caller.status = 'done' ; api.clearTimedOut(caller.triedAPIs) ; caller.attemptCnt = null
+                respText = respText.replace(apis[caller.api].watermark, '').trim()
                 show.reply(respText) ; show.replyCornerBtns()
             }}}
