From b9f84bbe6946fcc4c90a51c815a7c88cb540b97f Mon Sep 17 00:00:00 2001 From: Kye Date: Tue, 5 Mar 2024 12:06:29 -0800 Subject: [PATCH] [CLEANUP]: --- .pre-commit-config.yaml | 48 +++-- Cargo.toml | 97 +++++++++ Makefile | 186 +++++++++++++++--- README.md | 99 +++++----- package/main.py | 0 package/subfolder/__init__.py | 0 package/subfolder/main.py | 0 pyproject.toml | 97 +++++++-- src/.DS_Store | Bin 0 -> 6148 bytes src/concurrent_exec.rs | 93 +++++++++ src/cuda_wrapper.rs | 71 +++++++ src/file_utils.rs | 37 ++++ src/main.rs | 59 ++++++ src/multi_threading.rs | 113 +++++++++++ .../__init__.py => swarms_core/__init__..py | 0 15 files changed, 800 insertions(+), 100 deletions(-) create mode 100644 Cargo.toml delete mode 100644 package/main.py delete mode 100644 package/subfolder/__init__.py delete mode 100644 package/subfolder/main.py create mode 100644 src/.DS_Store create mode 100644 src/concurrent_exec.rs create mode 100644 src/cuda_wrapper.rs create mode 100644 src/file_utils.rs create mode 100644 src/main.rs create mode 100644 src/multi_threading.rs rename package/__init__.py => swarms_core/__init__..py (100%) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index ae0a4fc..cba11ae 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,18 +1,32 @@ +fail_fast: true + repos: - - repo: https://github.com/ambv/black - rev: 22.3.0 - hooks: - - id: black - - repo: https://github.com/charliermarsh/ruff-pre-commit - rev: 'v0.0.255' - hooks: - - id: ruff - args: [--fix] - - repo: https://github.com/nbQA-dev/nbQA - rev: 1.6.3 - hooks: - - id: nbqa-black - additional_dependencies: [ipython==8.12, black] - - id: nbqa-ruff - args: ["--ignore=I001"] - additional_dependencies: [ipython==8.12, ruff] \ No newline at end of file +- repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.0.1 + hooks: + - id: check-yaml + - id: check-toml + - id: end-of-file-fixer + - id: trailing-whitespace + - id: check-added-large-files + +- repo: local + 
hooks: + - id: lint-python + name: Lint Python + entry: make lint-python + types: [python] + language: system + pass_filenames: false + - id: typecheck-python + name: Typecheck Python + entry: make pyright + types: [python] + language: system + pass_filenames: false + - id: lint-rust + name: Lint Rust + entry: make lint-rust + types: [rust] + language: system + pass_filenames: false \ No newline at end of file diff --git a/Cargo.toml b/Cargo.toml new file mode 100644 index 0000000..9cd72e1 --- /dev/null +++ b/Cargo.toml @@ -0,0 +1,97 @@ +[package] +name = "swarms-core" +version = "0.0.1" +edition = "2021" +license = "MIT" +homepage = "https://github.com/kyegomez/swarms-core" +repository = "https://github.com/kyegomez/swarms-core.git" +readme = "README.md" +include = [ + "/pyproject.toml", + "/README.md", + "/LICENSE", + "/Makefile", + "/build.rs", + "/generate_self_schema.py", + "/rust-toolchain", + "/src", + "!/src/self_schema.py", + "/python/pydantic_core", + "/tests", + "/.cargo", + "!__pycache__", + "!tests/.hypothesis", + "!tests/.pytest_cache", + "!*.so", +] +rust-version = "1.70" + +[dependencies] +pyo3 = { version = "0.20.3", features = ["generate-import-lib", "num-bigint"] } +regex = "1.10.3" +strum = { version = "0.25.0", features = ["derive"] } +smallvec = "1.13.1" +base64 = "0.21.7" + +[lib] +name = "_pydantic_core" +crate-type = ["cdylib", "rlib"] + +[features] +# must be enabled when building with `cargo build`, maturin enables this automatically +extension-module = ["pyo3/extension-module"] + +[profile.release] +lto = "fat" +codegen-units = 1 +strip = true + +[profile.bench] +debug = true +strip = false + +# This is separate to benchmarks because `bench` ends up building testing +# harnesses into code, as it's a special cargo profile. 
+[profile.profiling] +inherits = "release" +debug = true +strip = false + +[dev-dependencies] +pyo3 = { version = "0.20.3", features = ["auto-initialize"] } + +[build-dependencies] +version_check = "0.9.4" +# used where logic has to be version/distribution specific, e.g. pypy +pyo3-build-config = { version = "0.20.2" } + +[lints.clippy] +dbg_macro = "warn" +print_stdout = "warn" + +# in general we lint against the pedantic group, but we will whitelist +# certain lints which we don't want to enforce (for now) +pedantic = { level = "warn", priority = -1 } +cast_possible_truncation = "allow" +cast_possible_wrap = "allow" +cast_precision_loss = "allow" +cast_sign_loss = "allow" +doc_markdown = "allow" +float_cmp = "allow" +fn_params_excessive_bools = "allow" +if_not_else = "allow" +manual_let_else = "allow" +match_bool = "allow" +match_same_arms = "allow" +missing_errors_doc = "allow" +missing_panics_doc = "allow" +module_name_repetitions = "allow" +must_use_candidate = "allow" +needless_pass_by_value = "allow" +similar_names = "allow" +single_match_else = "allow" +struct_excessive_bools = "allow" +too_many_lines = "allow" +unnecessary_wraps = "allow" +unused_self = "allow" +used_underscore_binding = "allow" \ No newline at end of file diff --git a/Makefile b/Makefile index a99809c..827061c 100644 --- a/Makefile +++ b/Makefile @@ -1,22 +1,164 @@ -.PHONY: style check_code_quality - -export PYTHONPATH = . -check_dirs := src - -style: - black $(check_dirs) - isort --profile black $(check_dirs) - -check_code_quality: - black --check $(check_dirs) - isort --check-only --profile black $(check_dirs) - # stop the build if there are Python syntax errors or undefined names - flake8 $(check_dirs) --count --select=E9,F63,F7,F82 --show-source --statistics - # exit-zero treats all errors as warnings. 
E203 for black, E501 for docstring, W503 for line breaks before logical operators - flake8 $(check_dirs) --count --max-line-length=88 --exit-zero --ignore=D --extend-ignore=E203,E501,W503 --statistics - -publish: - python setup.py sdist bdist_wheel - twine upload -r testpypi dist/* -u ${PYPI_USERNAME} -p ${PYPI_TEST_PASSWORD} --verbose - twine check dist/* - twine upload dist/* -u ${PYPI_USERNAME} -p ${PYPI_PASSWORD} --verbose \ No newline at end of file +.DEFAULT_GOAL := all +sources = python/pydantic_core tests generate_self_schema.py wasm-preview/run_tests.py + +mypy-stubtest = python -m mypy.stubtest pydantic_core._pydantic_core --allowlist .mypy-stubtest-allowlist + +# using pip install cargo (via maturin via pip) doesn't get the tty handle +# so doesn't render color without some help +export CARGO_TERM_COLOR=$(shell (test -t 0 && echo "always") || echo "auto") +# maturin develop only makes sense inside a virtual env, is otherwise +# more or less equivalent to pip install -e just a little nicer +USE_MATURIN = $(shell [ "$$VIRTUAL_ENV" != "" ] && (which maturin)) + +.PHONY: install +install: + pip install -U pip wheel pre-commit + pip install -r tests/requirements.txt + pip install -r tests/requirements-linting.txt + pip install -e . + pre-commit install + +.PHONY: install-rust-coverage +install-rust-coverage: + cargo install rustfilt coverage-prepare + rustup component add llvm-tools-preview + +.PHONY: install-pgo + rustup component add llvm-tools-preview + +.PHONY: build-dev +build-dev: + @rm -f python/pydantic_core/*.so +ifneq ($(USE_MATURIN),) + maturin develop +else + pip install -v -e . --config-settings=build-args='--profile dev' +endif + +.PHONY: build-prod +build-prod: + @rm -f python/pydantic_core/*.so +ifneq ($(USE_MATURIN),) + maturin develop --release +else + pip install -v -e . 
+endif + +.PHONY: build-profiling +build-profiling: + @rm -f python/pydantic_core/*.so +ifneq ($(USE_MATURIN),) + maturin develop --profile profiling +else + pip install -v -e . --config-settings=build-args='--profile profiling' +endif + +.PHONY: build-coverage +build-coverage: + @rm -f python/pydantic_core/*.so +ifneq ($(USE_MATURIN),) + RUSTFLAGS='-C instrument-coverage' maturin develop --release +else + RUSTFLAGS='-C instrument-coverage' pip install -v -e . +endif + +.PHONY: build-pgo +build-pgo: + @rm -f python/pydantic_core/*.so + $(eval PROFDATA := $(shell mktemp -d)) +ifneq ($(USE_MATURIN),) + RUSTFLAGS='-Cprofile-generate=$(PROFDATA)' maturin develop --release +else + RUSTFLAGS='-Cprofile-generate=$(PROFDATA)' pip install -v -e . +endif + pytest tests/benchmarks + $(eval LLVM_PROFDATA := $(shell rustup run stable bash -c 'echo $$RUSTUP_HOME/toolchains/$$RUSTUP_TOOLCHAIN/lib/rustlib/$$(rustc -Vv | grep host | cut -d " " -f 2)/bin/llvm-profdata')) + $(LLVM_PROFDATA) merge -o $(PROFDATA)/merged.profdata $(PROFDATA) +ifneq ($(USE_MATURIN),) + RUSTFLAGS='-Cprofile-use=$(PROFDATA)/merged.profdata' maturin develop --release +else + RUSTFLAGS='-Cprofile-use=$(PROFDATA)/merged.profdata' pip install -v -e . 
+endif + @rm -rf $(PROFDATA) + + +.PHONY: build-wasm +build-wasm: + @echo 'This requires python 3.11, maturin and emsdk to be installed' + maturin build --release --target wasm32-unknown-emscripten --out dist -i 3.11 + ls -lh dist + +.PHONY: format +format: + ruff check --fix $(sources) + ruff format $(sources) + cargo fmt + +.PHONY: lint-python +lint-python: + ruff check $(sources) + ruff format --check $(sources) + $(mypy-stubtest) + griffe dump -f -d google -LWARNING -o/dev/null python/pydantic_core + +.PHONY: lint-rust +lint-rust: + cargo fmt --version + cargo fmt --all -- --check + cargo clippy --version + cargo clippy --tests -- -D warnings + +.PHONY: lint +lint: lint-python lint-rust + +.PHONY: pyright +pyright: + pyright + +.PHONY: test +test: + pytest + +.PHONY: testcov +testcov: build-coverage + @rm -rf htmlcov + @mkdir -p htmlcov + coverage run -m pytest + coverage report + coverage html -d htmlcov/python + coverage-prepare html python/pydantic_core/*.so + +.PHONY: all +all: format build-dev lint test + +.PHONY: flame +flame: + @rm -rf perf.data* + @rm -rf flame + @mkdir -p flame + perf record -g profiling/dict_model.py + perf script --max-stack 20 | stackcollapse-perf.pl | flamegraph.pl > flame/python.svg + perf script --max-stack 20 | stackcollapse-perf.pl > flame/python.txt + @rm perf.data + JSON=1 perf record -g profiling/dict_model.py + perf script --max-stack 20 | stackcollapse-perf.pl | flamegraph.pl > flame/json.svg + perf script --max-stack 20 | stackcollapse-perf.pl > flame/json.txt + @rm perf.data + +.PHONY: clean +clean: + rm -rf `find . -name __pycache__` + rm -f `find . -type f -name '*.py[co]' ` + rm -f `find . -type f -name '*~' ` + rm -f `find . 
-type f -name '.*~' ` + rm -rf src/self_schema.py + rm -rf .cache + rm -rf flame + rm -rf htmlcov + rm -rf .pytest_cache + rm -rf *.egg-info + rm -f .coverage + rm -f .coverage.* + rm -rf build + rm -rf perf.data* + rm -rf python/pydantic_core/*.so \ No newline at end of file diff --git a/README.md b/README.md index b2d5137..7e3e03f 100644 --- a/README.md +++ b/README.md @@ -1,64 +1,73 @@ -[![Multi-Modality](agorabanner.png)](https://discord.gg/qUtxnK2NMf) +# swarms-core -# Python Package Template -A easy, reliable, fluid template for python packages complete with docs, testing suites, readme's, github workflows, linting and much much more +[![CI](https://github.com/kyegomez/swarms-core/workflows/ci/badge.svg?event=push)](https://github.com/kyegomez/swarms-core/actions?query=event%3Apush+branch%3Amain+workflow%3Aci) +[![Coverage](https://codecov.io/gh/kyegomez/swarms-core/branch/main/graph/badge.svg)](https://codecov.io/gh/kyegomez/swarms-core) +[![pypi](https://img.shields.io/pypi/v/swarms-core.svg)](https://pypi.python.org/pypi/swarms-core) +[![versions](https://img.shields.io/pypi/pyversions/swarms-core.svg)](https://github.com/kyegomez/swarms-core) +[![license](https://img.shields.io/github/license/kyegomez/swarms-core.svg)](https://github.com/kyegomez/swarms-core/blob/main/LICENSE) +This package provides the core functionality for [Swarms](https://github.com/kyegomez/swarms) exeuction strategies utilizing RUST. -## Installation -You can install the package using pip +# Install +`pip3 install -U swarms-core` -```bash -pip install -e . -``` - -# Usage -```python -print("hello world") - -``` - - - -### Code Quality ๐Ÿงน -- `make style` to format the code -- `make check_code_quality` to check code quality (PEP8 basically) -- `black .` -- `ruff . --fix` -### Tests ๐Ÿงช +## Getting Started -[`pytests`](https://docs.pytest.org/en/7.1.x/) is used to run our tests. 
+You'll need rust stable [installed](https://rustup.rs/), or rust nightly if you want to generate accurate coverage. -### Publish on PyPi ๐Ÿš€ +With rust and python 3.8+ installed, compiling swarms-core should be possible with roughly the following: -**Important**: Before publishing, edit `__version__` in [src/__init__](/src/__init__.py) to match the wanted new version. - -``` -poetry build -poetry publish +```bash +# clone this repo or your fork +git clone git@github.com:kyegomez/swarms-core.git +cd swarms-core +# create a new virtual env +python3 -m venv env +source env/bin/activate +# install dependencies and install swarms-core +make install ``` -### CI/CD ๐Ÿค– - -We use [GitHub actions](https://github.com/features/actions) to automatically run tests and check code quality when a new PR is done on `main`. +That should be it, the example shown above should now run. -On any pull request, we will check the code quality and tests. +You might find it useful to look at [`python/pydantic_core/_pydantic_core.pyi`](./python/pydantic_core/_pydantic_core.pyi) and +[`python/pydantic_core/core_schema.py`](./python/pydantic_core/core_schema.py) for more information on the python API, +beyond that, [`tests/`](./tests) provide a large number of examples of usage. -When a new release is created, we will try to push the new code to PyPi. We use [`twine`](https://twine.readthedocs.io/en/stable/) to make our life easier. 
+If you want to contribute to swarms-core, you'll want to use some other make commands: +* `make build-dev` to build the package during development +* `make build-prod` to perform an optimised build for benchmarking +* `make test` to run the tests +* `make testcov` to run the tests and generate a coverage report +* `make lint` to run the linter +* `make format` to format python and rust code +* `make` to run `format build-dev lint test` -The **correct steps** to create a new realease are the following: -- edit `__version__` in [src/__init__](/src/__init__.py) to match the wanted new version. -- create a new [`tag`](https://git-scm.com/docs/git-tag) with the release name, e.g. `git tag v0.0.1 && git push origin v0.0.1` or from the GitHub UI. -- create a new release from GitHub UI +## Test Profiling -The CI will run when you create the new release. +It's possible to profile the code using the [`flamegraph` utility from `flamegraph-rs`](https://github.com/flamegraph-rs/flamegraph). (Tested on Linux.) You can install this with `cargo install flamegraph`. -# Docs -We use MK docs. This repo comes with the zeta docs. All the docs configurations are already here along with the readthedocs configs. +Run `make build-profiling` to install a release build with debugging symbols included (needed for profiling). +Once that is built, you can profile pytest benchmarks with (e.g.): - -# License -MIT +```bash +flamegraph -- pytest tests/benchmarks/test_micro_benchmarks.py -k test_list_of_ints_core_py --benchmark-enable +``` +The `flamegraph` command will produce an interactive SVG at `flamegraph.svg`. + +## Releasing + +1. Bump package version locally. Do not just edit `Cargo.toml` on Github, you need both `Cargo.toml` and `Cargo.lock` to be updated. +2. Make a PR for the version bump and merge it. +3. Go to https://github.com/kyegomez/swarms-core/releases and click "Draft a new release" +4. 
In the "Choose a tag" dropdown enter the new tag `v` and select "Create new tag on publish" when the option appears. +5. Enter the release title in the form "v " +6. Click Generate release notes button +7. Click Publish release +8. Go to https://github.com/kyegomez/swarms-core/actions and ensure that all build for release are done successfully. +9. Go to https://pypi.org/project/swarms-core/ and ensure that the latest release is published. +10. Done ๐ŸŽ‰ diff --git a/package/main.py b/package/main.py deleted file mode 100644 index e69de29..0000000 diff --git a/package/subfolder/__init__.py b/package/subfolder/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/package/subfolder/main.py b/package/subfolder/main.py deleted file mode 100644 index e69de29..0000000 diff --git a/pyproject.toml b/pyproject.toml index 5d4ac8e..7ab1020 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,35 +1,53 @@ [build-system] -requires = ["poetry-core>=1.0.0"] -build-backend = "poetry.core.masonry.api" +requires = [ + 'maturin>=1,<2', + 'typing-extensions >=4.6.0,!=4.7.0' +] +build-backend = 'maturin' [tool.poetry] -name = "paper" +name = "swarms-core" version = "0.0.1" -description = "Paper - Pytorch" +description = "Swarms Core - Python" license = "MIT" authors = ["Kye Gomez "] -homepage = "https://github.com/kyegomez/paper" -documentation = "https://github.com/kyegomez/paper" # Add this if you have documentation. +homepage = "https://github.com/kyegomez/swarms-core" +documentation = "https://github.com/kyegomez/swarms-core" # Add this if you have documentation. 
readme = "README.md" # Assuming you have a README.md -repository = "https://github.com/kyegomez/paper" +repository = "https://github.com/kyegomez/swarms-core" keywords = ["artificial intelligence", "deep learning", "optimizers", "Prompt Engineering"] classifiers = [ - "Development Status :: 4 - Beta", - "Intended Audience :: Developers", - "Topic :: Scientific/Engineering :: Artificial Intelligence", - "License :: OSI Approved :: MIT License", - "Programming Language :: Python :: 3.9" + 'Development Status :: 3 - Alpha', + 'Programming Language :: Python', + 'Programming Language :: Python :: 3', + 'Programming Language :: Python :: 3 :: Only', + 'Programming Language :: Python :: 3.8', + 'Programming Language :: Python :: 3.9', + 'Programming Language :: Python :: 3.10', + 'Programming Language :: Python :: 3.11', + 'Programming Language :: Python :: 3.12', + 'Programming Language :: Rust', + 'Framework :: Swarms', + 'Intended Audience :: Developers', + 'Intended Audience :: Information Technology', + 'License :: OSI Approved :: MIT License', + 'Operating System :: POSIX :: Linux', + 'Operating System :: Microsoft :: Windows', + 'Operating System :: MacOS', + 'Typing :: Typed', ] +[project.urls] +Homepage = 'https://github.com/kyegomes/swarms-core' +Funding = 'https://github.com/sponsors/kyegomez' +Source = 'https://github.com/kyegomez/swarms-core' + + [tool.poetry.dependencies] python = "^3.6" swarms = "*" zetascale = "*" -[tool.poetry.dev-dependencies] -# Add development dependencies here - - [tool.poetry.group.lint.dependencies] ruff = "^0.1.6" types-toml = "^0.10.8.1" @@ -55,3 +73,50 @@ line-length = 70 line-length = 70 target-version = ['py38'] preview = true + + + + +[tool.maturin] +python-source = "python" +module-name = "pydantic_core._pydantic_core" +bindings = 'pyo3' +features = ["pyo3/extension-module"] + +[tool.ruff.lint] +extend-select = ['Q', 'RUF100', 'C90', 'I'] +extend-ignore = [ + 'E721', # using type() instead of isinstance() - we use this in 
tests +] +flake8-quotes = {inline-quotes = 'single', multiline-quotes = 'double'} +mccabe = { max-complexity = 13 } +isort = { known-first-party = ['pydantic_core', 'tests'] } + +[tool.ruff.format] +quote-style = 'single' + +[tool.pytest.ini_options] +testpaths = 'tests' +log_format = '%(name)s %(levelname)s: %(message)s' +timeout = 30 +xfail_strict = true +# min, max, mean, stddev, median, iqr, outliers, ops, rounds, iterations +addopts = [ + '--benchmark-columns', 'min,mean,stddev,outliers,rounds,iterations', + '--benchmark-group-by', 'group', + '--benchmark-warmup', 'on', + '--benchmark-disable', # this is enable by `make benchmark` when you actually want to run benchmarks +] + +[tool.coverage.run] +source = ['pydantic_core'] +branch = true + +[tool.coverage.report] +precision = 2 +exclude_lines = [ + 'pragma: no cover', + 'raise NotImplementedError', + 'if TYPE_CHECKING:', + '@overload', +] diff --git a/src/.DS_Store b/src/.DS_Store new file mode 100644 index 0000000000000000000000000000000000000000..5008ddfcf53c02e82d7eee2e57c38e5672ef89f6 GIT binary patch literal 6148 zcmeH~Jr2S!425mzP>H1@V-^m;4Wg<&0T*E43hX&L&p$$qDprKhvt+--jT7}7np#A3 zem<@ulZcFPQ@L2!n>{z**++&mCkOWA81W14cNZlEfg7;MkzE(HCqgga^y>{tEnwC%0;vJ&^%eQ zLs35+`xjp>T0 PyResult<()> { + m.add_function(wrap_pyfunction!(concurrent_exec, m)?)?; + Ok(()) +} + +/// This function wraps Python code in Rust concurrency for ultra high performance. +/// +/// # Arguments +/// +/// * `py_codes` - A vector of string slices that holds the Python codes to be executed. +/// * `timeout` - An optional duration to specify a timeout for the Python code execution. +/// * `num_threads` - The number of threads to use for executing the Python code. +/// * `error_handler` - A function to handle errors during Python code execution. +/// * `log_function` - A function to log the execution of the Python code. +/// * `result_handler` - A function to handle the results of the Python code execution. 
+/// +/// # Example +/// +/// ``` +/// let py_codes = vec!["print('Hello, World!')", "print('Hello, Rust!')"]; +/// let timeout = Some(Duration::from_secs(5)); +/// let num_threads = 4; +/// let error_handler = |e| eprintln!("Error: {}", e); +/// let log_function = |s| println!("Log: {}", s); +/// let result_handler = |r| println!("Result: {:?}", r); +/// execute_python_codes(py_codes, timeout, num_threads, error_handler, log_function, result_handler); +/// ``` + +#[pyfunction] +pub fn concurrent_exec( + py_codes: Vec<&str>, + timeout: Option, + num_threads: usize, + error_handler: F, + log_function: G, + result_handler: H, +) -> PyResult>> +where + F: Fn(&str), + G: Fn(&str), + H: Fn(&PyResult<()>), +{ + let gil = Python::acquire_gil(); + let py = gil.python(); + let py_codes = Arc::new(Mutex::new(py_codes)); + let results = Arc::new(Mutex::new(Vec::new())); + let pool = ThreadPool::new(num_threads); + + pool.install(|| { + py_codes.par_iter().for_each(|code| { + let locals = [("__name__", "__main__")].into_py_dict(py); + let globals = [("__name__", "__main__")].into_py_dict(py); + + log_function(&format!("Executing Python code: {}", code)); + let result = py.run(code, Some(globals), Some(locals)); + + match timeout { + Some(t) => { + let now = Instant::now(); + let timeout_thread = thread::spawn(move || { + while now.elapsed() < t { + if let Ok(_) = result { + break; + } + } + if now.elapsed() >= t { + error_handler(&format!("Python code execution timed out: {}", code)); + } + }); + + timeout_thread.join().unwrap(); + } + None => {} + } + + results.lock().unwrap().push(result.clone(result)); + result_handler(&result); + }); + }); + + pool.join(); + Ok(results.lock().unwrap().clone()) +} \ No newline at end of file diff --git a/src/cuda_wrapper.rs b/src/cuda_wrapper.rs new file mode 100644 index 0000000..7516088 --- /dev/null +++ b/src/cuda_wrapper.rs @@ -0,0 +1,71 @@ +use pyo3::prelude::*; +use rustacuda::prelude::*; +use rustacuda::memory::DeviceBox; +use 
std::error::Error; +use std::ffi::CString; + +#[pymodule] +fn rust_cuda(_py: Python, m: &PyModule) -> PyResult<()> { + #[pyfn(m, "execute_on_device")] + fn execute_on_device(py: Python, device_id: u32, a: f32, b: f32) -> PyResult { + /// The result of executing the CUDA operation. + let result = py.allow_threads(|| { + execute_cuda(device_id, a, b) + }); + match result { + Ok(res) => Ok(res), + Err(err) => Err(PyErr::new::(format!("{}", err))), + } + } + Ok(()) +} + +fn execute_cuda(device_id: u32, a: f32, b: f32) -> Result> { + rustacuda::init(CudaFlags::empty())?; + let device = Device::get_device(device_id)?; + /// Creates a new CUDA context and pushes it onto the current thread's stack. + /// + /// # Arguments + /// + /// * `flags` - The flags to be used when creating the context. + /// * `device` - The device on which the context will be created. + /// + /// # Returns + /// + /// The newly created CUDA context. + /// + /// # Errors + /// + /// Returns an error if the context creation fails. 
+ /// + /// # Example + /// + /// ```rust + /// use swarms::cuda_wrapper::Context; + /// + /// let device = 0; + /// let context = Context::create_and_push(ContextFlags::MAP_HOST | ContextFlags::SCHED_AUTO, device)?; + /// ``` + pub fn create_and_push(flags: ContextFlags, device: i32) -> Result { + // implementation goes here + } + let context = Context::create_and_push(ContextFlags::MAP_HOST | ContextFlags::SCHED_AUTO, device)?; + let module_data = CString::new(include_str!("../resources/add.ptx"))?; + let module = Module::load_from_string(&module_data)?; + let stream = Stream::new(StreamFlags::NON_BLOCKING, None)?; + let mut x = DeviceBox::new(&a)?; + let mut y = DeviceBox::new(&b)?; + let mut result = DeviceBox::new(&0.0f32)?; + unsafe { + launch!(module.sum<<<1, 1, 0, stream>>>( + x.as_device_ptr(), + y.as_device_ptr(), + result.as_device_ptr(), + 1 + ))?; + } + stream.synchronize()?; + let mut result_host = 0.0f32; + result.copy_to(&mut result_host)?; + Ok(result_host) +} \ No newline at end of file diff --git a/src/file_utils.rs b/src/file_utils.rs new file mode 100644 index 0000000..62d8ccb --- /dev/null +++ b/src/file_utils.rs @@ -0,0 +1,37 @@ +use std::fs::File; +use std::io::prelude::*; +use std::time::Instant; +use std::io::{BufReader, io}; +use ranyon::prelude::{IntoParallelRefIterator, ParallelIterator}; + +fn read_file(path: &str) -> Vec { + /// Reads the contents of a file located at the specified path. + /// + /// # Arguments + /// + /// * `path` - The path to the file. + /// + /// # Returns + /// + /// A `Result` containing a vector of strings representing the lines of the file if the file was successfully read, + /// or an `io::Error` if there was an error reading the file. 
+ /// + /// # Example + /// + /// ``` + /// use std::io; + /// use std::fs::File; + /// use std::io::BufReader; + /// + /// fn read_file(path: &str) -> io::Result> { + /// let contents: io::Result> = BufReader::new(File::open(path).expect("Could not open file")) + /// .lines() + /// .collect(); + /// contents + /// } + /// ``` + let contents: io::Result> = BufReader::new(File::open(path).expect("Could not open file")) + .lines() + .collect(); + return contents.expect("Could not read file"); +} \ No newline at end of file diff --git a/src/main.rs b/src/main.rs new file mode 100644 index 0000000..5078139 --- /dev/null +++ b/src/main.rs @@ -0,0 +1,59 @@ +use pyo3::prelude::*; +use pyo3::types::PyList; +use rayon::prelude::*; +use std::fs; +use std::time::Instant; + +// Define the new execute function +fn exec_concurrently(script_path: &str, threads: usize) -> PyResult<()> { + (0..threads).into_par_iter().for_each(|_| { + Python::with_gil(|py| { + let sys = py.import("sys").unwrap(); + let path: &PyList = match sys.getattr("path") { + Ok(path) => match path.downcast() { + Ok(path) => path, + Err(e) => { + eprintln!("Failed to downcast path: {:?}", e); + return; + } + }, + Err(e) => { + eprintln!("Failed to get path attribute: {:?}", e); + return; + } + }; + + if let Err(e) = path.append("lib/python3.11/site-packages") { + eprintln!("Failed to append path: {:?}", e); + } + + let script = fs::read_to_string(script_path).unwrap(); + py.run(&script, None, None).unwrap(); + }); + }); + Ok(()) +} + +fn main() -> PyResult<()> { + let args: Vec = std::env::args().collect(); + let threads = 20; + + if args.len() < 2 { + eprintln!("Usage: {} ", args[0]); + std::process::exit(1); + } + let script_path = &args[1]; + + let start = Instant::now(); + + // Call the execute function + exec_concurrently(script_path, threads)?; + + let duration = start.elapsed(); + match fs::write("/tmp/elapsed.time", format!("booting time: {:?}", duration)) { + Ok(_) => println!("Successfully wrote 
elapsed time to /tmp/elapsed.time"), + Err(e) => eprintln!("Failed to write elapsed time: {:?}", e), + } + + Ok(()) +} diff --git a/src/multi_threading.rs b/src/multi_threading.rs new file mode 100644 index 0000000..0e75606 --- /dev/null +++ b/src/multi_threading.rs @@ -0,0 +1,113 @@ +/// This module provides a multi-threading processor for executing Python modules and functions in parallel. +/// It utilizes the `rayon` crate for parallel processing and the `pyo3` crate for interacting with the Python interpreter. +/// The `multithreading_processor` function takes a vector of `PythonModule` structs and the number of threads to use. +/// Each `PythonModule` struct contains the name of the Python module, the name of the function to call, and any arguments to pass to the function. +/// The function imports the Python module, calls the specified function, and sends any errors encountered back to the main thread. +/// If an import error occurs, a `PythonError::ImportError` is returned. +/// If a function call error occurs, a `PythonError::FunctionError` is returned. + +use pyo3::prelude::*; +use pyo3::wrap_pyfunction; +use rayon::prelude::*; +use std::sync::mpsc::{channel, Sender}; +use std::sync::{Arc, Mutex}; +use log::{info, error}; + +struct PythonModule<'a> { + name: &'a str, + function: &'a str, +} + +enum PythonError { + ImportError(String), + FunctionError(String), +} + +#[pyfunction] +fn my_module(py: Python, m: &PyModule) -> PyResult<()> { + m.add_function(wrap_pyfunction!(process_python_modules, m)?)?; + Ok(()) +} + + + +/// The function returns `Ok(())` if all modules are processed successfully. +/// Note: This code assumes that the necessary dependencies (`pyo3`, `rayon`, `log`) are already imported and initialized. +/// +/// # Arguments +/// +/// * `modules` - A vector of `PythonModule` structs representing the Python modules and functions to execute. +/// * `num_threads` - The number of threads to use for parallel processing. 
+/// +/// # Examples +/// +/// ``` +/// use pyo3::types::PyModule; +/// use pyo3::types::PyResult; +/// use pyo3::prelude::*; +/// +/// struct PythonModule<'a> { +/// name: &'a str, +/// function: &'a str, +/// args: Vec<&'a str>, +/// } +/// +/// #[pymodule] +/// fn multithreading_processor(modules: Vec, num_threads: usize) -> Result<(), PythonError> { +/// // Function implementation +/// Ok(()) +/// } +/// ``` +/// +/// # Errors +/// +/// Returns a `PythonError` if an import error or a function call error occurs. +/// +/// # Panics +/// +/// This function does not panic. +/// +/// # Safety +/// +/// This function is safe to call, but it assumes that the necessary dependencies (`pyo3`, `rayon`, `log`) are already imported and initialized. +// Initialize Python interpreter +#[pyfunction] +fn process_python_modules(modules: Vec, num_threads: usize) -> Result<(), PythonError> { + + let gil = Python::acquire_gil(); + let py = gil.python(); + + // Set the global thread pool's configuration + rayon::ThreadPoolBuilder::new() + .num_threads(num_threads) + .build_global() + .unwrap(); + + // Create a channel to send errors from threads to the main thread + let (tx, rx) = channel(); + let tx = Arc::new(Mutex::new(tx)); + + // Process each Python module in parallel + modules.par_iter().for_each(|module| { + let result = PyModule::import(py, module.name) + .map_err(|_| PythonError::ImportError(module.name.to_string())) + .and_then(|m| m.call0(module.function) + .map_err(|_| PythonError::FunctionError(module.function.to_string()))); + + if let Err(e) = result { + let tx = tx.lock().unwrap(); + tx.send(e).unwrap(); + } + }); + + // Check for errors + drop(tx); // Close the sender + for error in rx { + match error { + PythonError::ImportError(module) => error!("Failed to import module {}", module), + PythonError::FunctionError(function) => error!("Failed to call function {}", function), + } + } + + Ok(()) +} \ No newline at end of file diff --git a/package/__init__.py 
b/swarms_core/__init__.py similarity index 100% rename from package/__init__.py rename to swarms_core/__init__.py