@@ -1,87 +1,73 @@
-# =+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
-# Copyright (C) Rodolfo Herrera Hernandez. All rights reserved.
-# Licensed under the MIT license. See LICENSE file in the project root
-# for full license information.
-#
-# =+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
-#
-# In the vast universe of knowledge, the Open Source philosophy
-# shines like a radiant star. In this vein, Lovelace emerges
-# as an autonomous alternative to ChatGPT, based on
-# open source and self-hosting capabilities.
-#
-# Written in JavaScript, interacting with the <g4f> library written
-# in Python, allows communication with ChatGPT through the use
-# of different services that facilitate its use by the public.
-#
-# For related information - https://github.com/CodeWithRodi/Lovelace/
-# See also - https://github.com/xtekky/gpt4free
-#
-# :: https://lovelace.codewithrodi.com/
-# :: https://lovelace-backend.codewithrodi.com/
-# :: https://lovelace-docs.codewithrodi.com/
-# =-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
-
 import json, sys, g4f
 
 AvailableProviders = json.loads(sys.argv[1])
 
-# ! Is this the best way of do it?
 try:
     Query = json.loads(sys.argv[2])
 except:
     Query = {}
 
-BASE_MESSAGES = [{
-    'role': 'system',
-    'content': 'You are Ada Lovelace, a coding software developed to provide free access to OpenAI models. Your Github repository is "https://github.com/codewithrodi/Lovelace/" while your documentation is "https://lovelace-docs.codewithrodi.com/". Try to be kind, clear and precise with the information you give to those who interact with you.'
-}]
-
-def FormatQueryMessages(Messages: tuple) -> tuple:
+def FormatQueryMessages(Messages: tuple, Provider) -> tuple:
+    BASE_MESSAGES = [{
+        'role': 'user' if Provider == 'ChatBase' else 'system',
+        'content': 'You are Ada Lovelace, a coding software developed to provide free access to OpenAI models. Your Github repository is "https://github.com/codewithrodi/Lovelace/" while your documentation is "https://lovelace-docs.codewithrodi.com/". Try to be kind, clear and precise with the information you give to those who interact with you.'
+    }]
     return BASE_MESSAGES + [ {
-        'role': Message['Role'].lower(),
-        'content': Message['Content'] } for Message in Messages ]
+        'role': 'user' if Provider == 'ChatBase' else Message.get('Role', 'user').lower(),
+        'content': Message.get('Content') } for Message in Messages ]
 
 def GetProviderData(Provider) -> dict:
     ImportedProvider = ImportProvider(Provider)
+    if(ImportedProvider is None):
+        return {
+            'Name': 'Automatic',
+            'Website': 'https://github.com/codewithrodi/Lovelace/',
+            'Models': ['gpt-3.5-turbo', 'gpt-4']
+        }
     Models = []
     if(ImportedProvider.supports_gpt_35_turbo):
         Models.append('gpt-3.5-turbo')
     if(ImportedProvider.supports_gpt_4):
         Models.append('gpt-4')
-    if(Provider == 'H2o'):
-        Models.extend(['falcon-40b', 'falcon-7b', 'llama-13b'])
     return {
         'Name': Provider,
         'Website': ImportedProvider.url,
         'Models': Models
     }
 
 def ImportProvider(ProviderName: str):
+    if(ProviderName == 'Automatic'):
+        return None
     return eval('g4f.Provider.' + ProviderName)
 
 def MainFN() -> None:
-    if sys.argv[3] == 'PROVIDERS':
-        print(json.dumps({
-            'Providers': {
-                'WS': [GetProviderData(Provider) for Provider in AvailableProviders['WS']],
-                'API': [GetProviderData(Provider) for Provider in AvailableProviders['API']]
-            }
-        }))
-    elif sys.argv[3] == 'API':
-        print(g4f.ChatCompletion.create(
-            model=Query['Model'],
-            provider=ImportProvider(Query['Provider']),
-            messages=FormatQueryMessages(Query['Messages'])))
-    else:
-        # ! STREAMED RESPONSE (sys.argv[3] == 'WS')
-        StreamedResponse = g4f.ChatCompletion.create(
-            model=Query['Model'],
-            messages=FormatQueryMessages(Query['Messages']),
-            provider=ImportProvider(Query['Provider']),
-            stream=True)
-        for Message in StreamedResponse:
-            print(Message)
+    try:
+        if sys.argv[3] == 'PROVIDERS':
+            print(json.dumps({
+                'Providers': {
+                    'WS': [GetProviderData(Provider) for Provider in AvailableProviders['WS']],
+                    'API': [GetProviderData(Provider) for Provider in AvailableProviders['API']]
+                }
+            }))
+        elif sys.argv[3] == 'API' or sys.argv[3] == 'WS':
+            Model = Query['Model']
+            Provider = None if Query['Provider'] == 'Automatic' else ImportProvider(Query['Provider'])
+            Messages = FormatQueryMessages(Query['Messages'], Query['Provider'])
+            if sys.argv[3] == 'API':
+                print(g4f.ChatCompletion.create(
+                    model=Model,
+                    provider=Provider,
+                    messages=Messages))
+            else:
+                StreamedResponse = g4f.ChatCompletion.create(
+                    model=Model,
+                    messages=Messages,
+                    provider=Provider,
+                    stream=True)
+                for Message in StreamedResponse:
+                    print(Message)
+    except Exception as GPTException:
+        print(GPTException)
 
 if __name__ == '__main__':
     MainFN()
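A note on how the reworked script is driven: the code above reads three positional arguments, a JSON map of available providers (sys.argv[1], with 'WS' and 'API' lists), a JSON query holding 'Model', 'Provider' and 'Messages' (sys.argv[2]), and a mode flag of 'PROVIDERS', 'API' or 'WS' (sys.argv[3]). The sketch below is a minimal smoke test of that contract; the filename 'GPT.py' and the direct subprocess call are assumptions, since the diff does not show the Node.js backend that normally spawns this script.

    # Hypothetical smoke test for the bridge script; 'GPT.py' is an assumed
    # filename, not confirmed by this diff.
    import json, subprocess

    providers = {'WS': ['Automatic'], 'API': ['Automatic']}
    query = {
        'Model': 'gpt-3.5-turbo',
        'Provider': 'Automatic',
        'Messages': [{'Role': 'User', 'Content': 'Hello!'}]
    }

    # Mode 'PROVIDERS' prints provider metadata as JSON, 'API' prints one
    # complete answer, and 'WS' streams the answer chunk by chunk on stdout.
    result = subprocess.run(
        ['python3', 'GPT.py', json.dumps(providers), json.dumps(query), 'API'],
        capture_output=True, text=True)
    print(result.stdout)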