@@ -20,9 +20,9 @@ let js_code_exec_output = '';
20
20
let original_code = '' ;
21
21
let temp_safe_mode = false ;
22
22
let pre_function_text = '' ;
23
- let azure_endpoint = localStorage . getItem ( 'azure_endpoint' ) ;
24
23
let all_chunks = [ ] ;
25
24
let has_chunk_error = false ;
25
+ let proxy_url = window . location . origin + window . location . pathname + "/cors-proxy.php" ;
26
26
27
27
// Markdown to HTML
28
28
showdown . setFlavor ( 'github' ) ;
@@ -45,14 +45,21 @@ let PLATFORM_DATA = {
45
45
models : [
46
46
"gemini-2.0-flash-exp" ,
47
47
"gemini-exp-1206" ,
48
- "learnlm-1.5-pro-experimental" ,
49
48
"gemini-1.5-pro" ,
50
49
"gemini-1.5-flash" ,
51
50
"gemini-1.5-flash-8b"
52
51
] ,
53
52
name : "Google" ,
54
53
endpoint : 'https://generativelanguage.googleapis.com/v1beta/models/{{model}}:{{gen_mode}}?key={{api_key}}'
55
54
} ,
55
+ deepseek : {
56
+ models : [
57
+ "deepseek-reasoner" ,
58
+ "deepseek-chat"
59
+ ] ,
60
+ name : "DeepSeek" ,
61
+ endpoint : "https://api.deepseek.com/chat/completions"
62
+ } ,
56
63
anthropic : {
57
64
models : [
58
65
"claude-3-5-sonnet-20241022" ,
@@ -90,6 +97,7 @@ let PLATFORM_DATA = {
90
97
"Meta-Llama-3.1-405B-Instruct" ,
91
98
"Llama-3.2-90B-Vision-Instruct"
92
99
] ,
100
+ needProxy : true ,
93
101
name : "SambaNova" ,
94
102
endpoint : "https://api.sambanova.ai/v1/chat/completions"
95
103
@@ -148,15 +156,6 @@ let PLATFORM_DATA = {
148
156
}
149
157
150
158
151
- if ( azure_endpoint ) {
152
- PLATFORM_DATA . azure = {
153
- models : [
154
- "gpt-4o-mini"
155
- ] ,
156
- name : "Azure" ,
157
- endpoint : azure_endpoint
158
- } ;
159
- }
160
159
161
160
const language_extension = {
162
161
"python" : "py" ,
@@ -617,7 +616,6 @@ function toggleAiGenAnimation(do_animate = 'toggle') {
617
616
function chat ( ) {
618
617
toggleAiGenAnimation ( true ) ;
619
618
if ( chosen_platform === 'google' ) {
620
- // endpoint = "https://generativelanguage.googleapis.com/v1beta/openai/chat/completions";
621
619
return geminiChat ( ) ;
622
620
}
623
621
return streamChat ( ) ;
@@ -1988,15 +1986,13 @@ async function streamChat(can_use_tools = true) {
1988
1986
if ( chosen_platform !== 'anthropic' ) {
1989
1987
if ( ! cmd ) {
1990
1988
if ( ! base64String ) {
1991
- // Groq vision accept no system prompt??
1992
1989
all_parts . push ( system_prompt ) ;
1993
1990
}
1994
1991
}
1995
1992
}
1996
1993
}
1997
1994
1998
1995
conversations . messages . forEach ( part => {
1999
- //let role = part.role === 'assistant' ? 'model' : part.role;
2000
1996
let cnt = part . content ;
2001
1997
last_role = part . role ;
2002
1998
last_cnt = part . content ;
@@ -2147,6 +2143,8 @@ async function streamChat(can_use_tools = true) {
2147
2143
return false ;
2148
2144
}
2149
2145
2146
+ endpoint = getEndpoint ( ) ;
2147
+
2150
2148
try {
2151
2149
const response = await fetch ( endpoint , requestOptions ) ;
2152
2150
if ( ! response . ok ) {
@@ -3150,6 +3148,18 @@ function loadUserAddedPrompts(){
3150
3148
loadUserAddedPrompts ( )
3151
3149
3152
3150
3151
// Checks whether the request for the chosen platform must be routed through
// cors-proxy.php to work around CORS restrictions and, if so, returns the
// rewritten proxy URL; otherwise returns the platform's endpoint unchanged.
//
// Reads module-level state: `chosen_platform`, `PLATFORM_DATA`, `proxy_url`.
// Returns: the endpoint URL (string), or undefined when the chosen platform
// is not present in PLATFORM_DATA.
function getEndpoint() {
  const platform = PLATFORM_DATA[chosen_platform];
  const endpoint = platform?.endpoint;
  const needProxy = platform?.needProxy ?? false;
  if (needProxy && endpoint) {
    // The target URL may itself contain `?`, `&` or `#` (e.g. the Google
    // endpoint's `?key={{api_key}}`), so it must be percent-encoded before
    // being embedded as a query-string value — otherwise everything after
    // the first `?` would be parsed as parameters of the proxy request.
    // PHP decodes $_GET values automatically on the cors-proxy.php side.
    return `${proxy_url}?endpoint=${encodeURIComponent(endpoint)}`;
  }
  return endpoint;
}
3161
+
3162
+
3153
3163
function deletePrompt ( ) {
3154
3164
let sl_prompt = document . querySelector ( "select[name=prompt]" ) ;
3155
3165
let selectedOption = sl_prompt . options [ sl_prompt . selectedIndex ] ;
@@ -3176,8 +3186,6 @@ function deletePrompt(){
3176
3186
}
3177
3187
}
3178
3188
3179
-
3180
-
3181
3189
selectedOption . remove ( ) ;
3182
3190
console . log ( selectedOption )
3183
3191
document . querySelector ( "textarea.system_prompt" ) . value = '' ;
0 commit comments