
Commit 676dc3f

Add python tests to CI

Committed Apr 5, 2024 · 1 parent 05b20dd

File tree

6 files changed (+48 -33 lines)


.github/workflows/python-package-conda.yml (-27 lines)

This file was deleted.

.github/workflows/python.yml (+29 lines)

@@ -0,0 +1,29 @@
+name: Python Package using Conda
+
+on: [push]
+
+jobs:
+  build-linux:
+    runs-on: ubuntu-latest
+    strategy:
+      max-parallel: 5
+
+    steps:
+    - uses: actions/checkout@v3
+    - name: Set up Conda
+      uses: conda-incubator/setup-miniconda@v3
+      with:
+        python-version: "3.10"
+        miniforge-version: latest
+        use-mamba: true
+        mamba-version: "*"
+    - name: Test Python
+      env:
+        PYTHONPATH: /home/runner/work/tokengrams/tokengrams
+      shell: bash -l {0}
+      run: |
+        mamba install -c conda-forge numpy pytest hypothesis maturin
+        maturin develop
+        maturin build
+        python -m pip install --user ./target/wheels/tokengrams*.whl
+        pytest
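
The workflow builds the Rust extension twice: `maturin develop` for an editable install and `maturin build` to produce a wheel, which is then installed and exercised by `pytest`. As a sketch of what that final step runs against, here is a minimal, hypothetical smoke test (not part of this commit; the file name tests/test_smoke.py is illustrative) confirming the installed wheel imports and the bindings respond, using the constructor signature introduced in this commit:

# tests/test_smoke.py (hypothetical)
from tokengrams import InMemoryIndex

def test_wheel_importable():
    # verbose=False, per the signature added in this commit
    index = InMemoryIndex([0, 1, 2, 3], False)
    # [1, 2] occurs as a contiguous bigram in the token stream
    assert index.contains([1, 2])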

environment.yml (+10 lines)

@@ -0,0 +1,10 @@
+name: test
+channels:
+  - conda-forge
+  - defaults
+dependencies:
+  - python=3.10
+  - numpy
+  - pytest
+  - hypothesis
+  - maturin
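
This file mirrors the dependency list the workflow installs inline with mamba; presumably it is meant to let contributors reproduce the CI environment locally, e.g. with `conda env create -f environment.yml` followed by `conda activate test`.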

tokengrams/__init__.py (+3 lines)

@@ -1,3 +1,6 @@
+import os
+print(os.getcwd())
+
 from .tokengrams import (
     InMemoryIndex,
     MemmapIndex,

tokengrams/tests/test_gram_index.py (+4 -4 lines)

@@ -28,25 +28,25 @@ def check_gram_index(index: InMemoryIndex | MemmapIndex, tokens: list[int]):
 )
 def test_gram_index(tokens: list[int]):
     # Construct index
-    index = InMemoryIndex(tokens)
+    index = InMemoryIndex(tokens, False)
     check_gram_index(index, tokens)
 
     # Save to disk and check that we can load it back
     with NamedTemporaryFile() as f:
         memmap = np.memmap(f, dtype=np.uint16, mode="w+", shape=(len(tokens),))
         memmap[:] = tokens
 
-        index = InMemoryIndex.from_token_file(f.name, None)
+        index = InMemoryIndex.from_token_file(f.name, False, None)
         check_gram_index(index, tokens)
 
         with NamedTemporaryFile() as idx:
-            index = MemmapIndex.build(f.name, idx.name)
+            index = MemmapIndex.build(f.name, idx.name, False)
             check_gram_index(index, tokens)
 
             index = MemmapIndex(f.name, idx.name)
             check_gram_index(index, tokens)
 
         # Now check limited token loading
         for limit in range(1, len(tokens) + 1):
-            index = InMemoryIndex.from_token_file(f.name, limit)
+            index = InMemoryIndex.from_token_file(f.name, False, limit)
             check_gram_index(index, tokens[:limit])
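
These test updates track a breaking change to the index constructors: each entry point now takes an explicit verbose flag. A hedged sketch of the new call shapes (the token and index file paths are illustrative, not from this commit):

from tokengrams import InMemoryIndex, MemmapIndex

index = InMemoryIndex([5, 9, 5], False)                           # tokens, verbose
index = InMemoryIndex.from_token_file("tokens.bin", False, None)  # path, verbose, token_limit
index = MemmapIndex.build("tokens.bin", "tokens.idx", False)      # token path, index path, verbose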

tokengrams/tokengrams.pyi (+2 -2 lines)

@@ -1,11 +1,11 @@
 class InMemoryIndex:
     """An n-gram index."""
 
-    def __init__(self, tokens: list[int]) -> None:
+    def __init__(self, tokens: list[int], verbose: bool) -> None:
         ...
 
     @staticmethod
-    def from_token_file(path: str, token_limit: int | None, verbose: bool) -> "InMemoryIndex":
+    def from_token_file(path: str, verbose: bool, token_limit: int | None) -> "InMemoryIndex":
         """Construct a `InMemoryIndex` from a file containing raw little-endian tokens."""
 
     def contains(self, query: list[int]) -> bool:
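
Note that `from_token_file` did not simply gain a parameter: `verbose` and `token_limit` swapped positions. A call site written against the old order would now pass its token limit where `verbose` is expected, so positional callers must be updated, as the test changes above do. A sketch with the new order (the path is illustrative):

# Old order: from_token_file(path, token_limit, verbose)
# New order: from_token_file(path, verbose, token_limit)
index = InMemoryIndex.from_token_file("tokens.bin", False, 1000)  # verbose off, load first 1000 tokens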
