Last active
February 20, 2025 00:58
-
-
Save gcrbr/19e47f6f85b653c79d2bb2b14a5d1e8c to your computer and use it in GitHub Desktop.
Simple python module to use DuckDuckGo's free AI chat.
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| import requests, json | |
class APIException(Exception):
    '''
    Raised when DuckDuckGo's API answers with an error object
    (i.e. a JSON body whose 'action' field is 'error').

    Attributes:
        status: numeric status code reported by the API
        type:   error type string reported by the API
    '''

    def __init__(self, status, _type):
        # Keep both fields on the instance so callers can branch on
        # status/type without parsing the exception message.
        super().__init__(f'[{status}] {_type}')
        self.status = status
        self.type = _type
class Model():
    '''
    Namespace listing the identifiers of all models supported by
    DuckDuckGo's AI chat.
    '''
    GPT_4O_MINI = 'gpt-4o-mini'
    CLAUDE_3_HAIKU = 'claude-3-haiku-20240307'
    LLAMA_3_1_70B = 'meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo'
    MIXTRAL_8X7B = 'mistralai/Mixtral-8x7B-Instruct-v0.1'

    @staticmethod
    def getAll(*args) -> list[str]:
        '''
        Return the attribute names of all supported models.

        Marked @staticmethod (the original was a plain function that only
        worked on instances because *args swallowed self); *args is kept,
        and ignored, so both Model.getAll() and Model().getAll() still work.
        '''
        return [m for m in vars(Model).keys() if not m.startswith('__') and m != 'getAll']
class Chat():
    '''
    Client for DuckDuckGo's free AI chat (duckchat) API.

    Keeps the whole conversation in self.payload and re-sends it on every
    request; the rotating 'x-vqd-4' token handed out by the API is refreshed
    after each call.
    '''

    def __init__(self, model, prompts=None):
        '''
        model:   model identifier string (see the Model class)
        prompts: optional str or list[str] queued as user messages before
                 the first communicate() call

        Performs one network request (initialize()) to obtain the initial
        x-vqd-4 token.
        '''
        self.model = model
        self.session = requests.Session()
        # Browser-like headers; 'x-vqd-accept: 1' asks the status endpoint
        # to hand out the first x-vqd-4 token (see initialize()).
        self.session.headers = {
            'cookie': 'dcm=5; dcs=1',
            'accept': 'text/event-stream',
            'accept-language': 'en',
            'content-type': 'application/json',
            'origin': 'https://duckduckgo.com',
            'priority': 'u=1, i',
            'referer': 'https://duckduckgo.com/',
            'sec-fetch-user': '',
            'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36',
            'x-vqd-accept': '1'
        }
        self.resetPayload()
        self.initialize()
        if prompts:
            # isinstance instead of type() == str: also accepts str subclasses.
            if isinstance(prompts, str):
                prompts = [prompts]
            for prompt in prompts:
                self.payload['messages'].append({
                    'role': 'user',
                    'content': prompt
                })

    def getPayloadSize(self) -> int:
        '''
        Returns the size (in characters of its JSON serialization) of the
        payload sent with every API call — basically the whole conversation.
        Useful to track usage, since DuckDuckGo tends to block conversations
        when they get too long.
        '''
        # Bug fix: the original called json.loads() on a dict, which always
        # raises TypeError; serialize with json.dumps() and measure that.
        return len(json.dumps(self.payload))

    def resetPayload(self):
        '''
        Resets the payload (the whole conversation will be lost).
        Useful to stay inside DuckDuckGo's API limits.
        '''
        self.payload = {
            'model': self.model,
            'messages': list()
        }

    def manageErrors(self, response):
        '''
        Detects API-level errors in a JSON response and raises APIException.
        Non-JSON responses are ignored (no-op).
        '''
        # Guard against a missing Content-Type header: the original called
        # .startswith() on None and crashed with AttributeError.
        content_type = response.headers.get('Content-Type') or ''
        if not content_type.startswith('application/json'):
            return
        output = response.json()
        if output.get('action') == 'error':
            raise APIException(output.get('status'), output.get('type'))

    def initialize(self):
        '''
        Initializes the environment to work with DuckDuckGo's API by
        fetching the first x-vqd-4 token. Automatically called in the
        constructor.
        '''
        response = self.session.get('https://duckduckgo.com/duckchat/v1/status')
        self.manageErrors(response)
        self.session.headers['x-vqd-4'] = response.headers.get('x-vqd-4')
        # pop() instead of del: safe even if initialize() is called twice.
        self.session.headers.pop('x-vqd-accept', None)

    def parseStream(self, stream: str) -> list[dict]:
        '''
        Parses the incoming SSE event stream obtained from the API.
        Returns a list of dictionaries (chat objects); parsing stops at
        the '[DONE]' sentinel.
        '''
        components = list()
        for line in stream.splitlines():
            if line.startswith('data:'):
                sub = line[6:]  # strip the 'data:' prefix plus one space
                if sub == '[DONE]':
                    break
                components.append(json.loads(sub))
        return components

    def communicate(self, content: str) -> str:
        '''
        Sends a user message to the LLM and returns the assistant's reply
        as a string (empty string on a non-stream, non-error response).
        Both messages are appended to the conversation payload.
        '''
        self.payload['messages'].append({
            'role': 'user',
            'content': content
        })
        # Use the session (not module-level requests.post) so its headers,
        # including the current x-vqd-4 token, are applied automatically.
        response = self.session.post('https://duckduckgo.com/duckchat/v1/chat', json=self.payload)
        # Bug fix: check for API errors BEFORE bailing out on the
        # Content-Type — error responses are application/json, so the
        # original returned '' and never reached manageErrors().
        self.manageErrors(response)
        if response.headers.get('Content-Type') != 'text/event-stream':
            return ''
        self.session.headers['x-vqd-4'] = response.headers.get('x-vqd-4')
        finalMessage = str()
        components = self.parseStream(response.content.decode())
        for c in components:
            text = c.get('message')
            finalMessage += text if text else ''
        self.payload['messages'].append({
            'role': 'assistant',
            'content': finalMessage
        })
        return finalMessage
if __name__ == '__main__':
    # Simple interactive CLI: pick a model, then chat until Ctrl-C.
    # Bug fix: the original read Chat.models, which does not exist anywhere
    # in this file — the model list lives on the Model class.
    models = Model.getAll()
    for i, name in enumerate(models, start=1):
        print(f'{i}. {name}')
    while True:
        choice = input('Please choose your model: ')
        try:
            choice = int(choice) - 1
        except ValueError:
            # Narrowed from a bare except: only a non-numeric string can
            # fail here, and Ctrl-C should not be swallowed.
            print('\033[0;31mInvalid numeric value provided, please try again.\033[0m')
            continue
        if choice < 0 or choice > len(models) - 1:
            print('\033[0;31mInvalid number provided, please try again.\033[0m')
            continue
        else:
            break
    print('')
    # Bug fix: resolve the chosen attribute on Model, not the nonexistent
    # Chat.models.
    model = getattr(Model, models[choice])
    bot = Chat(model)
    while True:
        try:
            you = input('\033[0mYou: ')
            print('\033[0;35m' + bot.communicate(you))
        except KeyboardInterrupt:
            break
        except Exception:
            # Best-effort: keep the chat loop alive on API failures.
            print('\033[0;31mAn error has occurred.\033[0m')
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment