chore: add codespell, apply spell fixes, and update contributing (#767)
mspronesti authored Nov 21, 2023
1 parent a61a538 commit abd1538
Showing 23 changed files with 283 additions and 48 deletions.
6 changes: 5 additions & 1 deletion .github/workflows/ci.yml
@@ -24,7 +24,11 @@ jobs:
- name: Install dependencies
run: poetry install --all-extras
- name: Lint with ruff
run: poetry run ruff pandasai examples
run: |
poetry run ruff pandasai examples
poetry run ruff format pandasai examples --diff
- name: Spellcheck
run: poetry run codespell pandasai docs examples tests
- name: Run tests
run: poetry run pytest
- name: Run code coverage
11 changes: 10 additions & 1 deletion CONTRIBUTING.md
@@ -48,9 +48,18 @@ Make sure that the linter does not report any errors or warnings before submitting
We use `ruff` to reformat the code by running the following command:

```bash
ruff format pandasai
ruff format pandasai examples
```

### Spell check

We use `codespell` to check the spelling of our code. You can run codespell by running the following command:

```bash
codespell pandasai docs examples -w
```


### 🧪 Testing

We use `pytest` to test our code. You can run the tests by running the following command:
2 changes: 1 addition & 1 deletion docs/building_docs.md
@@ -46,7 +46,7 @@ Below is the rundown of documentation structure for `pandasai`, you need to know
2. copy `mkdocs.yml`, `.readthedocs.yaml` and the `docs/` folder into your project root.
3. `docs/API` contains the API documentation created using `docstring`. For any new module, add the links here
4. Project is using standard Google Docstring Style.
5. Rebuild the documenation locally to see that it works.
5. Rebuild the documentation locally to see that it works.
6. Documentation are hosted on [Read the Docs tutorial](https://docs.readthedocs.io/en/stable/tutorial/)

> Define the release version in `mkdocs.yml` file.
2 changes: 1 addition & 1 deletion docs/custom-prompts.md
@@ -25,7 +25,7 @@ class MyCustomPrompt(AbstractPrompt):
return """This is your custom text for your prompt with custom {my_custom_value}"""

def setup(self, kwargs):
# This method is called before the prompt is intialized
# This method is called before the prompt is initialized
# You can use it to setup your prompt and pass any additional
# variables to the template
self.set_var("my_custom_value", kwargs["my_custom_value"])
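
The hunk above only shows the `setup` hook of the custom prompt. As a rough illustration of the template-plus-`set_var` flow it describes, here is a standalone sketch (`TinyPrompt` is a hypothetical stand-in, not the actual pandasai `AbstractPrompt` API):

```python
# Standalone sketch of the template/set_var flow illustrated above.
# TinyPrompt is a hypothetical stand-in, not pandasai's AbstractPrompt.
class TinyPrompt:
    template = "This is your custom text for your prompt with custom {my_custom_value}"

    def __init__(self, **kwargs):
        self._vars = {}
        self.setup(kwargs)

    def setup(self, kwargs):
        # Called before the prompt is rendered; stash extra template variables here.
        self.set_var("my_custom_value", kwargs["my_custom_value"])

    def set_var(self, name, value):
        self._vars[name] = value

    def render(self) -> str:
        # Substitute the stored variables into the template string.
        return self.template.format(**self._vars)


print(TinyPrompt(my_custom_value=42).render())
# This is your custom text for your prompt with custom 42
```
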
2 changes: 1 addition & 1 deletion examples/from_sql.py
@@ -38,7 +38,7 @@
}
)

# With a Sqlite databse
# With a Sqlite database

invoice_connector = SqliteConnector(
config={
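
The connector config above is cut off by the diff view. Purely for orientation, a Sqlite connector config along these lines typically points at a database file and a table; the keys below are assumptions, not taken from this diff:

```python
# Hypothetical Sqlite connector config -- key names are assumptions here,
# check the SqliteConnector signature/docs for the authoritative fields.
invoice_connector_config = {
    "database": "path/to/invoices.db",  # local SQLite database file
    "table": "invoices",                # table to expose to the LLM
}
```
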
2 changes: 1 addition & 1 deletion examples/with_multiple_dataframes.py
@@ -25,6 +25,6 @@
[employees_df, salaries_df],
config={"llm": llm, "verbose": True},
)
response = dl.chat("Plot salaries againt name")
response = dl.chat("Plot salaries against name")
print(response)
# Output: <displays the plot>
2 changes: 1 addition & 1 deletion pandasai/agent/__init__.py
@@ -159,7 +159,7 @@ def explain(self) -> str:
)
response = self._call_llm_with_prompt(prompt)
self._logger.log(
f"""Explaination: {response}
f"""Explanation: {response}
"""
)
return response
2 changes: 1 addition & 1 deletion pandasai/assets/prompt_templates/current_code.tmpl
@@ -2,7 +2,7 @@
{default_import}

"""
The variable `dfs: list[pd.DataFrame]` is already decalared.
The variable `dfs: list[pd.DataFrame]` is already declared.
{instructions}

Return a "result" variable dict:
2 changes: 1 addition & 1 deletion pandasai/connectors/airtable.py
@@ -213,7 +213,7 @@ def head(self):
Returns :
DatFrameType: The head of the data source
that the conector is connected to .
that the connector is connected to .
"""
data = self._request_api(params={"maxRecords": 5})
return pd.DataFrame(
4 changes: 2 additions & 2 deletions pandasai/connectors/sql.py
@@ -112,7 +112,7 @@ def _validate_column_name(self, column_name):
def _build_query(self, limit=None, order=None):
base_query = select("*").select_from(text(self._config.table))
if self._config.where or self._additional_filters:
# conditions is the list of wher + additional filters
# conditions is the list of where + additional filters
conditions = []
if self._config.where:
conditions += self._config.where
@@ -412,7 +412,7 @@ class SqliteConnector(SQLConnector):

def __init__(self, config: Union[SqliteConnectorConfig, dict]):
"""
Intialize the Sqlite connector with the given configuration.
Initialize the Sqlite connector with the given configuration.
Args:
config (ConnectorConfig) : The configuration for the MySQL connector.
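
The `_build_query` hunk above combines the configured `where` clause with any additional filters before querying. As a rough, self-contained sketch of that pattern with SQLAlchemy (condition format and helper name are assumptions, not the connector's actual code):

```python
# Self-contained sketch of the "base query + AND-ed textual conditions" idea
# shown in the _build_query hunk above; the real connector may differ.
from typing import List, Optional

from sqlalchemy import and_, select, text


def build_query(table: str, conditions: Optional[List[str]] = None,
                limit: Optional[int] = None):
    query = select(text("*")).select_from(text(table))
    if conditions:
        # where clause from the config plus any additional filters, AND-ed together
        query = query.where(and_(*(text(cond) for cond in conditions)))
    if limit is not None:
        query = query.limit(limit)
    return query


print(build_query("invoices", conditions=["status = 'pending'"], limit=5))
# renders roughly: SELECT * FROM invoices WHERE status = 'pending' LIMIT :param_1
```
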
8 changes: 4 additions & 4 deletions pandasai/exceptions.py
@@ -8,7 +8,7 @@
class InvalidRequestError(Exception):

"""
Raised when the request is not succesfull.
Raised when the request is not successful.
Args :
Exception (Exception): InvalidRequestError
@@ -80,7 +80,7 @@ def __init__(self, model_name):

class MissingModelError(Exception):
"""
Raised when deployment name is not passed to azure as it's a required paramter
Raised when deployment name is not passed to azure as it's a required parameter
Args:
Exception (Exception): MissingModelError
@@ -166,7 +166,7 @@ class InvalidWorkspacePathError(Exception):

class InvalidConfigError(Exception):
"""
Raised when config value is not appliable
Raised when config value is not applicable
Args:
Exception (Exception): InvalidConfigError
"""
@@ -176,5 +176,5 @@ class MaliciousQueryError(Exception):
"""
Raise error if malicious query is generated
Args:
Exception (Excpetion): MaliciousQueryError
Exception (Exception): MaliciousQueryError
"""
4 changes: 2 additions & 2 deletions pandasai/helpers/code_manager.py
@@ -419,7 +419,7 @@ def _get_nearest_func_call(current_lineno, calls, func_name):
@staticmethod
def _tokenize_operand(operand_node: ast.expr) -> Generator[str, None, None]:
"""
Utility generator function to get subscript slice contants.
Utility generator function to get subscript slice constants.
Args:
operand_node (ast.expr):
@@ -453,7 +453,7 @@ def _get_df_id_by_nearest_assignment(
current_lineno: int, assignments: list[ast.Assign], target_name: str
):
"""
Utility function to get df label by finding the nearest assigment.
Utility function to get df label by finding the nearest assignment.
Sort assignment nodes list (copy of the list) by line number.
Iterate over the assignment nodes list. If the assignment node's value
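
The docstring above describes finding a dataframe label via the nearest preceding assignment. A generic illustration of that idea with `ast` (not the library's actual implementation) might look like this:

```python
# Generic illustration of "nearest assignment before a given line" using ast;
# not the actual pandasai implementation.
import ast


def nearest_assignment_lineno(source: str, target_name: str, current_lineno: int):
    """Line number of the closest assignment to `target_name` at or before
    `current_lineno`, or None if there is none."""
    assignments = [n for n in ast.walk(ast.parse(source)) if isinstance(n, ast.Assign)]
    best = None
    # Sort by line number and stop once we pass the line of interest.
    for node in sorted(assignments, key=lambda n: n.lineno):
        if node.lineno > current_lineno:
            break
        if any(isinstance(t, ast.Name) and t.id == target_name for t in node.targets):
            best = node.lineno
    return best


code = "df = dfs[0]\nresult = df.head()\ndf = dfs[1]\n"
print(nearest_assignment_lineno(code, "df", current_lineno=2))  # -> 1
```
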
2 changes: 1 addition & 1 deletion pandasai/llm/base.py
@@ -68,7 +68,7 @@ def _polish_code(self, code: str) -> str:
removing the imports and removing trailing spaces and new lines.
Args:
code (str): A sting of Python code.
code (str): A string of Python code.
Returns:
str: Polished code.
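
The docstring above says this helper cleans LLM-generated code by removing imports and trailing whitespace. A hedged sketch of a cleanup step in that spirit (not the actual `_polish_code` implementation, which may behave differently):

```python
# Sketch of an LLM-output cleanup step in the spirit of the docstring above
# (drop markdown fences and import lines, trim whitespace); the real
# _polish_code may behave differently.
def polish_code(code: str) -> str:
    kept = []
    for line in code.strip().splitlines():
        stripped = line.strip()
        if stripped.startswith("```"):
            continue  # markdown fence the LLM wrapped around the code
        if stripped.startswith(("import ", "from ")):
            continue  # imports are provided by the execution environment
        kept.append(line.rstrip())
    return "\n".join(kept).strip()


print(polish_code("```python\nimport pandas as pd\nresult = dfs[0].head()\n```"))
# -> result = dfs[0].head()
```
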
4 changes: 2 additions & 2 deletions pandasai/pipelines/logic_units/prompt_execution.py
@@ -7,7 +7,7 @@
class PromptExecution(BaseLogicUnit):
def execute(self, input: FileBasedPrompt, **kwargs) -> Any:
config = kwargs.get("config")
if config is None or getattr(config, 'llm', None) is None:
if config is None or getattr(config, "llm", None) is None:
raise LLMNotFoundError()
llm = getattr(config, 'llm')
llm = getattr(config, "llm")
return llm.call(input)
2 changes: 1 addition & 1 deletion pandasai/pipelines/pipeline.py
@@ -27,7 +27,7 @@ def __init__(
logger: Optional[Logger] = None,
):
"""
Intialize the pipeline with given context and configuration
Initialize the pipeline with given context and configuration
parameters.
Args :
context (Context) : Context is required for ResponseParsers.
2 changes: 1 addition & 1 deletion pandasai/pipelines/smart_datalake_chat/result_parsing.py
@@ -41,7 +41,7 @@ def _add_result_to_memory(self, result: dict, context: PipelineContext):
Args:
result (dict): The result to add to the memory
context (PipelineContext) : Pipleline Context
context (PipelineContext) : Pipeline Context
"""
if result is None:
return
2 changes: 1 addition & 1 deletion pandasai/smart_datalake/__init__.py
@@ -448,7 +448,7 @@ def prepare_context_for_smart_datalake_pipeline(
self, query: str, output_type: Optional[str] = None
) -> PipelineContext:
"""
Prepare Pipeline Context to intiate Smart Data Lake Pipeline.
Prepare Pipeline Context to initiate Smart Data Lake Pipeline.
Args:
query (str): Query to run on the dataframe