// herramientas-chagpt.js — multi-backend AI chat commands (chatgpt/ia, openai, gemini, copilot/bing)
import fs from 'fs';
import path from 'path';
import axios from 'axios';
import fetch from 'node-fetch';
import translate from '@vitalets/google-translate-api';
import { Configuration, OpenAIApi } from "openai";
import { perplexity } from '../lib/scraper.js';
  8. const apikey_base64 = "c2stcHJvai1tUzN4bGZueXo0UjBPWV8zbm1DVDlMQmlmYXhYbVdaa0ptUVFJMDVKR2FxdHZCbk9ncWZjRXdCbEJmMU5WN0lYa0pncVJuM3BNc1QzQmxia0ZKMVJ5aEJzUl93NzRXbll5LWdjdkowT0NQUXliWTBOcENCcDZIOTlCVVVtcWxuTjVraEZxMk43TGlMU0RsU0s1cXA5Tm1kWVZXc0E=";
  9. const apikey = Buffer.from(apikey_base64, 'base64').toString('utf-8');
  10. const configuration = new Configuration({apiKey: apikey,
  11. });
  12. const openai = new OpenAIApi(configuration);
  13. const handler = async (m, {conn, text, usedPrefix, command}) => {
  14. let who = m.mentionedJid && m.mentionedJid[0] ? m.mentionedJid[0] : m.fromMe ? conn.user.jid : m.sender
  15. let pp = await conn.profilePictureUrl(who, 'image').catch(_ => 'https://telegra.ph/file/9d38415096b6c46bf03f8.jpg')
  16. if (!text) return m.reply(await tr(`*Hola cómo esta 😊, El que te puedo ayudar?*, ingrese una petición o orden para usar la función de chagpt\n*Ejemplo:*\n${usedPrefix + command} Recomienda un top 10 de películas de acción`))
  17. let syms1 = await fetch('https://raw.githubusercontent.com/crxsmods/text3/refs/heads/main/text-chatgpt').then(v => v.text());
  18. if (command == 'ia' || command == 'chatgpt') {
  19. await conn.sendPresenceUpdate('composing', m.chat)
  20. try {
  21. // Usar la nueva API solicitada
  22. const encodedPrompt = encodeURIComponent(syms1);
  23. const encodedContent = encodeURIComponent(text);
  24. let gpt = await fetch(`https://api.siputzx.my.id/api/ai/gpt3?prompt=${encodedPrompt}&content=${encodedContent}`);
  25. let res = await gpt.json();
  26. if (res.status) {
  27. await m.reply(res.data);
  28. } else {
  29. // Si falla, intentar con los respaldos originales
  30. throw new Error("API principal falló");
  31. }
  32. } catch {
  33. try {
  34. const messages = [{ role: 'system', content: syms1 },
  35. { role: 'user', content: text }];
  36. const chooseModel = (query) => {
  37. const lowerText = query.toLowerCase();
  38. if (lowerText.includes('código') || lowerText.includes('programación') || lowerText.includes('code') || lowerText.includes('script')) {
  39. return 'codellama-70b-instruct';
  40. } else if (lowerText.includes('noticias') || lowerText.includes('actual') || lowerText.includes('hoy') || lowerText.includes('último')) {
  41. return 'sonar-medium-online';
  42. } else if (lowerText.includes('explica') || lowerText.includes('por qué') || lowerText.includes('razona') || lowerText.includes('analiza')) {
  43. return 'sonar-reasoning-pro';
  44. } else if (lowerText.includes('cómo') || lowerText.includes('paso a paso') || lowerText.includes('instrucciones')) {
  45. return 'mixtral-8x7b-instruct';
  46. } else if (lowerText.includes('charla') || lowerText.includes('habla') || lowerText.includes('dime')) {
  47. return 'sonar-medium-chat';
  48. } else {
  49. return 'sonar-pro';
  50. }};
  51. const selectedModel = chooseModel(text);
  52. const fallbackModels = Object.keys(perplexity.api.models).filter(m => m !== selectedModel);
  53. let response = await perplexity.chat(messages, selectedModel);
  54. if (!response.status) {
  55. for (const fallback of fallbackModels) {
  56. try {
  57. response = await perplexity.chat(messages, fallback);
  58. if (response.status) {
  59. break;
  60. }
  61. } catch (e) {
  62. console.error(`Falló ${fallback}: ${e.message}`);
  63. }
  64. }
  65. }
  66. if (response.status) {
  67. await m.reply(response.result.response);
  68. } else {
  69. throw new Error("Perplexity falló");
  70. }
  71. } catch {
  72. try {
  73. async function getResponse(prompt) {
  74. try {
  75. await delay(1000);
  76. const response = await axios.post('https://api.openai.com/v1/chat/completions',
  77. { model: 'gpt-4o-mini',
  78. messages: [{ role: 'user', content: prompt }],
  79. max_tokens: 300,
  80. }, { headers: {
  81. 'Content-Type': 'application/json',
  82. 'Authorization': `Bearer ${apikey}`,
  83. }});
  84. return response.data.choices[0].message.content;
  85. } catch (error) {
  86. console.error(error);
  87. }
  88. }
  89. const respuesta = await getResponse(text);
  90. m.reply(respuesta);
  91. } catch {
  92. try {
  93. let gpt = await fetch(`${apis}/ia/gptprompt?text=${text}?&prompt=${syms1}`);
  94. let res = await gpt.json();
  95. await m.reply(res.data);
  96. } catch {
  97. try {
  98. let gpt = await fetch(`${apis}/ia/gptweb?text=${text}`);
  99. let res = await gpt.json();
  100. await m.reply(res.gpt);
  101. } catch {
  102. m.reply("Lo siento, todos los servicios de IA están fallando en este momento. Inténtalo más tarde.");
  103. }
  104. }
  105. }
  106. }
  107. }}
  108. if (command == 'openai' || command == 'ia2' || command == 'chatgpt2') {
  109. conn.sendPresenceUpdate('composing', m.chat);
  110. let gpt = await fetch(`${apis}/api/ia2?text=${text}`)
  111. let res = await gpt.json()
  112. await m.reply(res.gpt)
  113. }
  114. if (command == 'gemini') {
  115. await conn.sendPresenceUpdate('composing', m.chat)
  116. try {
  117. let gpt = await fetch(`https://api.dorratz.com/ai/gemini?prompt=${text}`)
  118. let res = await gpt.json()
  119. await m.reply(res.message)
  120. } catch {
  121. try {
  122. let gpt = await fetch(`${apis}/ia/gemini?query=${text}`)
  123. let res = await gpt.json()
  124. await m.reply(res.message)
  125. } catch {
  126. }}}
  127. if (command == 'copilot' || command == 'bing') {
  128. await conn.sendPresenceUpdate('composing', m.chat)
  129. try {
  130. let gpt = await fetch(`https://api.dorratz.com/ai/bing?prompt=${text}`)
  131. let res = await gpt.json()
  132. await conn.sendMessage(m.chat, { text: res.result.ai_response, contextInfo: {
  133. externalAdReply: {
  134. title: "[ IA COPILOT ]",
  135. body: wm,
  136. thumbnailUrl: "https://qu.ax/nTDgf.jpg",
  137. sourceUrl: [nna, nna2, nn, md, yt, tiktok].getRandom(),
  138. mediaType: 1,
  139. showAdAttribution: false,
  140. renderLargerThumbnail: false
  141. }}}, { quoted: m })
  142. } catch {
  143. try {
  144. let gpt = await fetch(`${apis}/ia/bingia?query=${text}`)
  145. let res = await gpt.json()
  146. await m.reply(res.message)
  147. } catch {
  148. }}}}
  149. handler.help = ["chagpt", "ia", "openai", "gemini", "copilot"]
  150. handler.tags = ["buscadores"]
  151. handler.command = /^(openai|chatgpt|ia|ai|openai2|chatgpt2|ia2|gemini|copilot|bing)$/i;
  152. export default handler;
  153. const delay = (ms) => new Promise(resolve => setTimeout(resolve, ms));