diff --git a/plugins_config.yaml b/plugins_config.yaml
new file mode 100644
index 0000000..804b698
--- /dev/null
+++ b/plugins_config.yaml
@@ -0,0 +1,8 @@
+# plugins_config.yaml
+plugins:
+  - name: MaterialsProjectPlugin
+    description: A plugin to interact with the Materials Project API.
+    version: 0.1.0
+    settings:
+      api_key: "your_api_key_here"
+      base_url: "https://next-gen.materialsproject.org/api/v2/"
diff --git a/setup.py b/setup.py
new file mode 100644
index 0000000..edda9da
--- /dev/null
+++ b/setup.py
@@ -0,0 +1,29 @@
+from setuptools import setup, find_packages
+
+setup(
+    name='your_plugin_name',
+    version='0.1.0',
+    packages=find_packages(where='src'),
+    package_dir={'': 'src'},
+    install_requires=[
+        'requests',  # Add any other dependencies your plugin needs
+    ],
+    entry_points={
+        'auto_gpt_plugins': [
+            'your_plugin_name = auto_gpt_plugin_template.materials_plugin:MaterialsProjectPlugin',  # FIX: point at the real module created by this change, not the your_plugin_module placeholder
+        ],
+    },
+    description='A plugin for AutoGPT to interact with the Materials Project API',
+    long_description=open('README.md', encoding='utf-8').read(),  # FIX: explicit encoding so the build does not depend on the locale default
+    long_description_content_type='text/markdown',
+    author='Your Name',
+    author_email='your.email@example.com',
+    url='https://github.com/yourusername/your_plugin_name',  # Update with your repository URL
+    classifiers=[
+        'Development Status :: 3 - Alpha',
+        'Intended Audience :: Developers',
+        'License :: OSI Approved :: MIT License',
+        'Programming Language :: Python :: 3',
+    ],
+    python_requires='>=3.6',
+)
diff --git a/src/auto_gpt_plugin_template/abstract_singleton.py b/src/auto_gpt_plugin_template/abstract_singleton.py
new file mode 100644
index 0000000..fb04bff
--- /dev/null
+++ b/src/auto_gpt_plugin_template/abstract_singleton.py
@@ -0,0 +1,35 @@
+# src/auto_gpt_plugin_template/abstract_singleton.py
+
+class SingletonMeta(type):
+    """A metaclass for implementing the Singleton pattern."""
+    _instances = {}  # NOTE(review): not thread-safe — guard with a lock if instantiated concurrently
+
+    def __call__(cls, *args, **kwargs):
+        if cls not in cls._instances:
+            instance = super().__call__(*args, **kwargs)
+            cls._instances[cls] = instance
+        return cls._instances[cls]
+
+
+class AbstractSingleton(metaclass=SingletonMeta):
+    """Abstract base class for Singleton pattern."""
+
+    def __init__(self):
+        if not hasattr(self, '_initialized'):
+            self._initialized = True
+            self.initialize()
+
+    def initialize(self):
+        """Initialize the singleton instance.
+
+        This method can be overridden in subclasses to perform custom initialization.
+        """
+        pass
+
+    @classmethod
+    def get_instance(cls):
+        """Retrieve the singleton instance.
+
+        This method provides access to the singleton instance from outside the class.
+        """
+        return cls()
diff --git a/src/auto_gpt_plugin_template/materials_plugin.py b/src/auto_gpt_plugin_template/materials_plugin.py
new file mode 100644
index 0000000..7dde17a
--- /dev/null
+++ b/src/auto_gpt_plugin_template/materials_plugin.py
@@ -0,0 +1,95 @@
+import requests
+from typing import Any, Dict, List, Optional, Tuple
+from auto_gpt_plugin_template import AutoGPTPluginTemplate  # FIX: 'from ..' escapes the top-level package (src/ is the package root per package_dir) and raises ImportError
+
+class MaterialsProjectPlugin(AutoGPTPluginTemplate):
+    def __init__(self):
+        super().__init__()
+        self._name = "MaterialsProjectPlugin"
+        self._version = "0.1.0"
+        self._description = "A plugin to make API requests to the Materials Project API."
+        self.api_key = "your_api_key_here"  # FIX(security): a real API key was committed here — revoke it; load the key from plugins_config.yaml or an environment variable instead
+        self.base_url = "https://next-gen.materialsproject.org/api/v2/"
+
+    def can_handle_on_response(self) -> bool:
+        return True
+
+    def on_response(self, response: str, *args, **kwargs) -> str:
+        endpoint = "materials"
+        params = {"search": response}
+        headers = {"X-API-KEY": self.api_key}
+
+        api_response = requests.get(f"{self.base_url}{endpoint}", headers=headers, params=params, timeout=30)  # FIX: without a timeout a stalled server hangs the agent forever
+        return api_response.text  # FIX: .json() returns a dict, but the plugin interface declares this hook returns str
+
+    def can_handle_post_prompt(self) -> bool:
+        return False
+
+    def post_prompt(self, prompt: Any) -> Any:
+        pass
+
+    def can_handle_on_planning(self) -> bool:
+        return False
+
+    def on_planning(self, prompt: Any, messages: List[Dict[str, str]]) -> Optional[str]:
+        pass
+
+    def can_handle_post_planning(self) -> bool:
+        return False
+
+    def post_planning(self, response: str) -> str:
+        pass
+
+    def can_handle_pre_instruction(self) -> bool:
+        return False
+
+    def pre_instruction(self, messages: List[Dict[str, str]]) -> List[Dict[str, str]]:
+        pass
+
+    def can_handle_on_instruction(self) -> bool:
+        return False
+
+    def on_instruction(self, messages: List[Dict[str, str]]) -> Optional[str]:
+        pass
+
+    def can_handle_post_instruction(self) -> bool:
+        return False
+
+    def post_instruction(self, response: str) -> str:
+        pass
+
+    def can_handle_pre_command(self) -> bool:
+        return False
+
+    def pre_command(self, command_name: str, arguments: Dict[str, Any]) -> Tuple[str, Dict[str, Any]]:
+        pass
+
+    def can_handle_post_command(self) -> bool:
+        return False
+
+    def post_command(self, command_name: str, response: str) -> str:
+        pass
+
+    def can_handle_chat_completion(self, messages: List[Dict[str, str]], model: str, temperature: float, max_tokens: int) -> bool:  # FIX: was Dict[Any, Any] — inconsistent with handle_chat_completion below
+        return False
+
+    def handle_chat_completion(self, messages: List[Dict[str, str]], model: str, temperature: float, max_tokens: int) -> str:
+        pass
+
+    def can_handle_text_embedding(self, text: str) -> bool:
+        return False
+
+    def handle_text_embedding(self, text: str) -> list:
+        pass
+
+    def can_handle_user_input(self, user_input: str) -> bool:
+        return False
+
+    def user_input(self, user_input: str) -> str:
+        pass
+
+    def can_handle_report(self) -> bool:
+        return False
+
+    def report(self, message: str) -> None:
+        pass